def DeserializeFromDB(buffer):
    """
    Deserialize full object.

    Args:
        buffer (bytes, bytearray, BytesIO): (Optional) data to create the stream from.

    Returns:
        ValidatorState:
    """
    m = StreamManager.GetStream(buffer)
    reader = BinaryReader(m)
    v = ValidatorState()
    try:
        v.Deserialize(reader)
    finally:
        # Always return the pooled stream, even if Deserialize raises;
        # the original leaked the stream on a deserialization error.
        StreamManager.ReleaseStream(m)
    return v
def FromTrimmedData(byts, index=None, transaction_method=None):
    """
    Deserialize a trimmed block from raw bytes.

    Args:
        byts (bytes): raw trimmed-block data.
        index: unused; kept for interface compatibility.
        transaction_method: unused; kept for interface compatibility.

    Returns:
        Block: with `Transactions` holding transaction *hashes*, not full objects.
    """
    block = Block()
    block.__is_trimmed = True
    ms = StreamManager.GetStream(byts)
    reader = BinaryReader(ms)
    try:
        block.DeserializeUnsigned(reader)
        reader.ReadByte()  # consume the single byte between header and witness
        witness = Witness()
        witness.Deserialize(reader)
        block.witness = witness
        block.Transactions = reader.ReadHashes()
    finally:
        # Fix: release the pooled stream even if deserialization raises.
        StreamManager.ReleaseStream(ms)
    return block
def ToArray(value):
    """
    Serialize the given `value` to an array of bytes.

    Args:
        value (neo.IO.Mixins.SerializableMixin): object extending SerializableMixin.

    Returns:
        bytes:
    """
    ms = StreamManager.GetStream()
    writer = BinaryWriter(ms)
    try:
        value.Serialize(writer)
        retVal = ms.ToArray()
    finally:
        # Fix: release the pooled stream even if Serialize raises.
        StreamManager.ReleaseStream(ms)
    return retVal
def test_2_serialize_notify_no_payload(self):
    """Round-trip a RUNTIME_NOTIFY event with an empty Array payload through Serialize / FromByteArray."""
    # Fixture values (contract_hash, event_tx) come from the test class setup.
    sc = SmartContractEvent(SmartContractEvent.RUNTIME_NOTIFY, ContractParameter(ContractParameterType.Array, []),
                            self.contract_hash, 99, self.event_tx, True, False)

    stream = StreamManager.GetStream()
    writer = BinaryWriter(stream)
    sc.Serialize(writer)
    out = bytes(stream.getvalue())

    # Expected wire bytes for exactly this fixture; any serialization change breaks this.
    self.assertEqual(out, b'\x1cSmartContract.Runtime.Notify\x11\xc4\xd1\xf4\xfb\xa6\x19\xf2b\x88p\xd3n:\x97s\xe8tp[c\x00\x00\x00\x90\xe4\xf1\xbbb\x8e\xf1\x07\xde\xe9\xf0\xd2\x12\xd1w\xbco\x844\x07=\x1b\xa7\x1f\xa7\x94`\x0b\xb4\x88|K')
    StreamManager.ReleaseStream(stream)

    # Deserializing the serialized bytes must reproduce the original event's fields.
    new_event = SmartContractEvent.FromByteArray(out)
    self.assertEqual(new_event.event_type, sc.event_type)
    self.assertEqual(new_event.contract_hash, sc.contract_hash)
    self.assertEqual(new_event.test_mode, sc.test_mode)
    self.assertEqual(new_event.tx_hash, sc.tx_hash)
    self.assertEqual(new_event.block_number, sc.block_number)
def test_account_state(self):
    """Exercise AccountState address derivation, balance bookkeeping, and DB (de)serialization round-trip."""
    # NOTE(review): `hash` and `input` shadow builtins; left as-is to keep the test byte-identical.
    hash = UInt160(data=self.shash)
    account = AccountState(script_hash=hash)
    addr = account.Address
    self.assertEqual(addr, self.saddr)

    # Build an AssetState from the hex fixture to get a real AssetId.
    input = binascii.unhexlify(self.assset)
    asset = AssetState.DeserializeFromDB(input)

    # Add then subtract balance for the asset and verify the running total.
    account.AddToBalance(asset.AssetId, Fixed8(2440000000))
    self.assertEqual(account.BalanceFor(asset.AssetId), Fixed8(2440000000))
    account.SubtractFromBalance(asset.AssetId, Fixed8(1220000000))
    self.assertEqual(account.BalanceFor(asset.AssetId), Fixed8(1220000000))
    self.assertEqual(account.HasBalance(asset.AssetId), True)

    # Drive a second asset negative; the asset balance above is still positive,
    # so not all balances are zero-or-less.
    sshare_hash = Blockchain.SystemShare().Hash
    account.SubtractFromBalance(sshare_hash, Fixed8(800000000))
    self.assertFalse(account.AllBalancesZeroOrLess())

    acct_json = account.ToJson()

    # Serialize, deserialize back, and compare the JSON representations.
    stream = StreamManager.GetStream()
    writer = BinaryWriter(stream)
    account.Serialize(writer)
    out = stream.ToArray()
    StreamManager.ReleaseStream(stream)

    input = binascii.unhexlify(out)
    newaccount = AccountState.DeserializeFromDB(input)
    self.assertEqual(acct_json, newaccount.ToJson())
def FromTrimmedData(byts):
    """
    Deserialize a block from raw bytes, resolving full transactions from the chain.

    Args:
        byts: raw trimmed-block data.

    Returns:
        Block:

    Raises:
        Exception: if a referenced transaction is not found in the blockchain,
            or the block contains no transactions.
    """
    block = Block()
    block.__is_trimmed = True
    ms = StreamManager.GetStream(byts)
    reader = BinaryReader(ms)
    try:
        block.DeserializeUnsigned(reader)
        reader.ReadByte()  # consume the single byte between header and witness
        witness = Witness()
        witness.Deserialize(reader)
        block.Script = witness

        # Resolve each stored transaction hash to the full transaction.
        bc = GetBlockchain()
        tx_list = []
        for tx_hash in reader.ReadHashes():
            tx = bc.GetTransaction(tx_hash)[0]
            if not tx:
                raise Exception(
                    "Could not find transaction!\n Are you running code against a valid Blockchain instance?\n Tests that accesses transactions or size of a block but inherit from NeoTestCase instead of BlockchainFixtureTestCase will not work."
                )
            tx_list.append(tx)

        if len(tx_list) < 1:
            raise Exception(
                "Invalid block, no transactions found for block %s " % block.Index)

        block.Transactions = tx_list
    finally:
        # Fix: the original leaked the pooled stream whenever one of the
        # exceptions above fired; release it on every exit path.
        StreamManager.ReleaseStream(ms)
    return block
def Runtime_Serialize(self, engine: ExecutionEngine):
    """
    Serialize the top evaluation-stack item and push the resulting byte array.

    Args:
        engine (ExecutionEngine): the engine whose current context is used.

    Returns:
        bool: True on success; False if the item cannot be serialized or the
            serialized form exceeds `engine.maxItemSize`.
    """
    stack_item = engine.CurrentContext.EvaluationStack.Pop()
    ms = StreamManager.GetStream()
    writer = BinaryWriter(ms)
    try:
        try:
            stack_item.Serialize(writer)
        except Exception as e:
            logger.error("Cannot serialize item %s: %s " % (stack_item, e))
            return False

        ms.flush()

        # Enforce the VM's per-item size limit.
        if ms.tell() > engine.maxItemSize:
            return False

        retVal = ByteArray(ms.getvalue())
    finally:
        # Fix: the original leaked the pooled stream on both early returns;
        # release it on every exit path.
        StreamManager.ReleaseStream(ms)
    engine.CurrentContext.EvaluationStack.PushT(retVal)
    return True
def OnAddHeader(self, header):
    """Index a newly added header and flush full 2000-hash pages plus the header record to the database."""
    header_hash = header.Hash.ToBytes()

    if header_hash not in self._header_index:
        self._header_index.append(header_hash)

    # Persist header hashes in fixed-size pages of 2000 once we are far enough ahead.
    while header.Index - 2000 >= self._stored_header_count:
        start = self._stored_header_count
        page_hashes = self._header_index[start:start + 2000]

        ms = StreamManager.GetStream()
        writer = BinaryWriter(ms)
        writer.Write2000256List(page_hashes)
        page = ms.ToArray()
        StreamManager.ReleaseStream(ms)

        with self._db.write_batch() as batch:
            batch.put(DBPrefix.IX_HeaderHashList + start.to_bytes(4, 'little'), page)

        self._stored_header_count += 2000

    # Store the header itself (only if unseen) and advance the current-header pointer.
    with self._db.write_batch() as batch:
        if self._db.get(DBPrefix.DATA_Block + header_hash) is None:
            batch.put(DBPrefix.DATA_Block + header_hash, bytes(8) + header.ToArray())
        batch.put(DBPrefix.SYS_CurrentHeader, header_hash + header.Index.to_bytes(4, 'little'))
def CheckMessageData(self):
    """Deserialize and dispatch the pending message once its full payload has been buffered.

    `self.pm` holds the partially-read message (header already parsed); this
    waits until the input buffer contains the complete 24-byte header plus
    `self.pm.Length` payload bytes.
    """
    if not self.pm:
        return

    expected_len = 24 + self.pm.Length
    buffered = len(self.buffer_in)
    # percentcomplete = int(100 * (buffered / expected_len))
    # self.Log("Receiving %s data: %s percent complete" % (self.pm.Command, percentcomplete))

    if buffered < expected_len:
        # Payload incomplete; flag it and wait for more data.
        self.reset_counter = True
        return

    # Slice exactly one full message off the buffer and deserialize it.
    raw = self.buffer_in[:expected_len]
    stream = StreamManager.GetStream(raw)
    reader = BinaryReader(stream)
    message = Message()
    message.Deserialize(reader)
    StreamManager.ReleaseStream(stream)

    # Consume the processed bytes and clear the pending-message marker.
    self.buffer_in = self.buffer_in[expected_len:]
    self.pm = None

    self.MessageReceived(message)

    self.reset_counter = False
    # Keep draining while complete headers remain and nothing asked us to stop.
    while len(self.buffer_in) > 24 and not self.reset_counter:
        self.CheckDataReceived()
def Dispose(self):
    """Drop the reader reference and return the backing stream to the StreamManager pool."""
    self.__OpReader = None
    StreamManager.ReleaseStream(self.__mstream)
def __init__(self, path):
    """
    Open (or create) the RocksDB-backed blockchain at `path` and rebuild the
    in-memory header index from the stored header hash pages.

    Args:
        path (str): filesystem path of the RocksDB database directory.

    Raises:
        Exception: if the RocksDB database cannot be opened.
    """
    super(RocksDBBlockchain, self).__init__()
    self._path = path

    self._header_index = []
    self._header_index.append(
        Blockchain.GenesisBlock().Header.Hash.ToBytes())

    logger.info("start to create database")
    # Fix: the original passed extra positional args with no %s placeholder
    # (e.g. logger.info("db path is: ", self._path)), which makes the logging
    # module's lazy %-formatting fail; use proper placeholders throughout.
    logger.info("db path is: %s", self._path)
    try:
        opts = rocksdb.Options()
        opts.create_if_missing = True
        opts.prefix_extractor = StaticPrefix()
        self._db = rocksdb.DB(self._path, opts)
        logger.info("rocksdb is created successfully")
    except Exception as e:
        logger.info(
            "RocksDB unavailable, you may already be running this process: %s " % e)
        raise Exception('RocksDB Unavailable')

    version = self._db.get(DBPrefix.SYS_Version)
    logger.info("database is created successfully, version is: %s", version)
    logger.info("database is created successfully, self_sysversion is: %s", self._sysversion)

    if version == self._sysversion:  # or in the future, if version doesn't equal the current version...
        # The last 4 bytes of each SYS_* value hold a little-endian height.
        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentBlock, 0))
        self._current_block_height = int.from_bytes(ba[-4:], 'little')
        logger.debug("current block height: %s", self._current_block_height)

        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentHeader, 0))
        current_header_height = int.from_bytes(ba[-4:], 'little')
        current_header_hash = bytes(ba[:64].decode('utf-8'), encoding='utf-8')

        # Rebuild the header index from the stored 2000-hash pages.
        hashes = []
        try:
            it = self._db.iteritems()
            it.seek(DBPrefix.IX_HeaderHashList)
            for key, value in dict(
                    itertools.takewhile(
                        lambda item: item[0].startswith(
                            DBPrefix.IX_HeaderHashList), it)).items():
                ms = StreamManager.GetStream(value)
                reader = BinaryReader(ms)
                hlist = reader.Read2000256List()
                key = int.from_bytes(key[-4:], 'little')
                hashes.append({'k': key, 'v': hlist})
                StreamManager.ReleaseStream(ms)
        except Exception as e:
            logger.info("Couldnt get stored header hash list: %s " % e)

        if len(hashes):
            hashes.sort(key=lambda x: x['k'])
            genstr = Blockchain.GenesisBlock().Hash.ToBytes()
            for hlist in hashes:
                for hash in hlist['v']:
                    # Genesis is already in the index; count every stored hash.
                    if hash != genstr:
                        self._header_index.append(hash)
                    self._stored_header_count += 1

        if self._stored_header_count == 0:
            # No stored hash pages: recreate headers from the raw block records.
            headers = []
            it = self._db.iteritems()
            logger.debug("seek key prefix: %s", DBPrefix.DATA_Block)
            it.seek(DBPrefix.DATA_Block)
            for key, value in dict(
                    itertools.takewhile(
                        lambda item: item[0].startswith(DBPrefix.DATA_Block),
                        it)).items():
                # First 8 bytes of each block record are skipped before decoding.
                dbhash = bytearray(value)[8:]
                headers.append(
                    Header.FromTrimmedData(binascii.unhexlify(dbhash), 0))

            headers.sort(key=lambda h: h.Index)
            for h in headers:
                if h.Index > 0:
                    self._header_index.append(h.Hash.ToBytes())
        elif current_header_height > self._stored_header_count:
            # Walk back from the current header until we meet the last indexed
            # hash, then append the missing headers in order.
            try:
                hash = current_header_hash
                targethash = self._header_index[-1]
                newhashes = []
                while hash != targethash:
                    header = self.GetHeader(hash)
                    newhashes.insert(0, header)
                    hash = header.PrevHash.ToBytes()
                self.AddHeaders(newhashes)
            except Exception as e:
                # Best-effort catch-up; a failure just leaves the index short.
                pass
    else:
        # Unknown/old schema: wipe every key and restart from the genesis block.
        wb = rocksdb.WriteBatch()
        for key in self._db.iterkeys():
            wb.delete(key)
        self._db.write(wb)
        self.Persist(Blockchain.GenesisBlock())
        self._db.put(DBPrefix.SYS_Version, self._sysversion)
def CheckDataReceived(self):
    """Tries to extract a Message from the data buffer and process it."""
    currentLength = len(self.buffer_in)
    if currentLength < 24:
        # A message header is exactly 24 bytes; wait for more data.
        return

    # Extract the message header from the buffer, and return if not enough
    # buffer to fully deserialize the message object.
    try:
        # Construct message
        mstart = self.buffer_in[:24]
        ms = StreamManager.GetStream(mstart)
        reader = BinaryReader(ms)
        m = Message()

        # Extract message metadata
        m.Magic = reader.ReadUInt32()
        m.Command = reader.ReadFixedString(12).decode('utf-8')
        m.Length = reader.ReadUInt32()
        m.Checksum = reader.ReadUInt32()

        # Return if not enough buffer to fully deserialize object.
        messageExpectedLength = 24 + m.Length
        # percentcomplete = int(100 * (currentLength / messageExpectedLength))
        # self.Log("Receiving %s data: %s percent complete" % (m.Command, percentcomplete))
        if currentLength < messageExpectedLength:
            return

    except Exception as e:
        self.Log('Error: Could not read initial bytes %s ' % e)
        return

    finally:
        # NOTE(review): if StreamManager.GetStream itself raised, `ms`/`reader`
        # would be unbound here and this cleanup would raise NameError —
        # confirm GetStream cannot fail for a 24-byte slice.
        StreamManager.ReleaseStream(ms)
        del reader

    # The message header was successfully extracted, and we have enough buffer
    # to extract the full payload
    try:
        # Extract message bytes from buffer and truncate buffer
        mdata = self.buffer_in[:messageExpectedLength]
        self.buffer_in = self.buffer_in[messageExpectedLength:]

        # Deserialize message with payload
        stream = StreamManager.GetStream(mdata)
        reader = BinaryReader(stream)
        message = Message()
        message.Deserialize(reader)

        # Propagate new message
        self.MessageReceived(message)

    except Exception as e:
        self.Log('Error: Could not extract message: %s ' % e)
        return

    finally:
        StreamManager.ReleaseStream(stream)

    # Finally, after a message has been fully deserialized and propagated,
    # check if another message can be extracted with the current buffer:
    if len(self.buffer_in) >= 24:
        self.CheckDataReceived()
def CheckDataReceived(self):
    """Tries to extract a Message from the data buffer and process it.

    Returns:
        bool: True if a complete message was extracted and dispatched;
            False if more data is needed or an error occurred.
    """
    currentLength = len(self.buffer_in)
    if currentLength < 24:
        # A message header is exactly 24 bytes; wait for more data.
        return False

    # Extract the message header from the buffer, and return if not enough
    # buffer to fully deserialize the message object.
    try:
        # Construct message
        mstart = self.buffer_in[:24]
        ms = StreamManager.GetStream(mstart)
        reader = BinaryReader(ms)
        m = Message()

        # Extract message metadata
        m.Magic = reader.ReadUInt32()
        m.Command = reader.ReadFixedString(12).decode('utf-8')
        m.Length = reader.ReadUInt32()
        m.Checksum = reader.ReadUInt32()

        # Return if not enough buffer to fully deserialize object.
        messageExpectedLength = 24 + m.Length
        if currentLength < messageExpectedLength:
            return False

    except Exception as e:
        logger.debug(f"{self.prefix} Error: could not read message header from stream {e}")
        # self.Log('Error: Could not read initial bytes %s ' % e)
        return False

    finally:
        # NOTE(review): if StreamManager.GetStream itself raised, `ms`/`reader`
        # would be unbound here and this cleanup would raise NameError —
        # confirm GetStream cannot fail for a 24-byte slice.
        StreamManager.ReleaseStream(ms)
        del reader

    # The message header was successfully extracted, and we have enough buffer
    # to extract the full payload
    try:
        # Extract message bytes from buffer and truncate buffer
        mdata = self.buffer_in[:messageExpectedLength]
        self.buffer_in = self.buffer_in[messageExpectedLength:]

        # Deserialize message with payload
        stream = StreamManager.GetStream(mdata)
        reader = BinaryReader(stream)
        message = Message()
        message.Deserialize(reader)

        # An inbound peer must answer our handshake with 'verack' first.
        if self.incoming_client and self.expect_verack_next:
            if message.Command != 'verack':
                self.Disconnect("Expected 'verack' got {}".format(message.Command))

        # Propagate new message
        self.MessageReceived(message)

    except Exception as e:
        logger.debug(f"{self.prefix} Could not extract message {e}")
        # self.Log('Error: Could not extract message: %s ' % e)
        return False

    finally:
        StreamManager.ReleaseStream(stream)

    return True
def cleanup(self):
    """Return the pooled stream to the StreamManager, if one was acquired."""
    if self._stream:
        StreamManager.ReleaseStream(self._stream)
def main():
    """Import blocks from a chain dump file into a LevelDB blockchain, optionally appending and storing notifications."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-m", "--mainnet", action="store_true", default=False,
                        help="use MainNet instead of the default TestNet")
    parser.add_argument("-c", "--config", action="store", help="Use a specific config file")

    # Where to store stuff
    parser.add_argument("--datadir", action="store",
                        help="Absolute path to use for database directories")

    parser.add_argument("-i", "--input", help="Where the input file lives")

    parser.add_argument("-t", "--totalblocks", help="Total blocks to import", type=int)

    parser.add_argument("-l", "--logevents", help="Log Smart Contract Events", default=False, action="store_true")

    parser.add_argument("-n", "--notifications", help="Persist Notifications to database", default=False, action="store_true")

    parser.add_argument("-a", "--append", action="store_true", default=False, help="Append to current Block database")

    args = parser.parse_args()

    if args.mainnet and args.config:
        print("Cannot use both --config and --mainnet parameters, please use only one.")
        exit(1)

    # Setting the datadir must come before setting the network, else the wrong path is checked at net setup.
    if args.datadir:
        settings.set_data_dir(args.datadir)

    # Setup depending on command line arguments. By default, the testnet settings are already loaded.
    if args.config:
        settings.setup(args.config)
    elif args.mainnet:
        settings.setup_mainnet()

    if args.logevents:
        settings.log_smart_contract_events = True

    if not args.input:
        raise Exception("Please specify an input path")
    file_path = args.input

    append = False
    store_notifications = False

    start_block = 0

    if args.append:
        append = True

    if args.notifications:
        store_notifications = True

    header_hash_list = []

    with open(file_path, 'rb') as file_input:

        total_blocks_available = int.from_bytes(file_input.read(4), 'little')
        # Some dump formats carry a leading zero word before the real count.
        if total_blocks_available == 0:
            total_blocks_available = int.from_bytes(file_input.read(4), 'little')

        total_blocks = total_blocks_available
        if args.totalblocks and args.totalblocks < total_blocks and args.totalblocks > 0:
            total_blocks = args.totalblocks

        target_dir = os.path.join(settings.DATA_DIR_PATH, settings.LEVELDB_PATH)
        notif_target_dir = os.path.join(settings.DATA_DIR_PATH, settings.NOTIFICATION_DB_PATH)

        if append:
            # Reuse the existing chain database and start after its current height.
            blockchain = LevelDBBlockchain(settings.chain_leveldb_path, skip_header_check=True)
            Blockchain.RegisterBlockchain(blockchain)
            start_block = Blockchain.Default().Height
            print("Starting import at %s " % start_block)
        else:
            print("Will import %s of %s blocks to %s" % (total_blocks, total_blocks_available, target_dir))
            print("This will overwrite any data currently in %s and %s.\nType 'confirm' to continue" % (target_dir, notif_target_dir))

            try:
                confirm = prompt("[confirm]> ", is_password=False)
            except KeyboardInterrupt:
                confirm = False
            if not confirm == 'confirm':
                print("Cancelled operation")
                return False

            try:
                if os.path.exists(target_dir):
                    shutil.rmtree(target_dir)
                if os.path.exists(notif_target_dir):
                    shutil.rmtree(notif_target_dir)
            except Exception as e:
                print("Could not remove existing data %s " % e)
                return False

            # Instantiate the blockchain and subscribe to notifications
            blockchain = LevelDBBlockchain(settings.chain_leveldb_path)
            Blockchain.RegisterBlockchain(blockchain)

        chain = Blockchain.Default()

        if store_notifications:
            NotificationDB.instance().start()

        # One reusable stream/reader/block instance for the whole import loop.
        stream = MemoryStream()
        reader = BinaryReader(stream)
        block = Block()
        length_ba = bytearray(4)

        for index in trange(total_blocks, desc='Importing Blocks', unit=' Block'):
            # set stream data
            file_input.readinto(length_ba)
            block_len = int.from_bytes(length_ba, 'little')
            reader.stream.write(file_input.read(block_len))
            reader.stream.seek(0)

            # get block
            block.DeserializeForImport(reader)
            header_hash_list.append(block.Hash.ToBytes())

            # add
            if block.Index > start_block:
                chain.AddBlockDirectly(block, do_persist_complete=store_notifications)

            # reset blockheader
            block._header = None
            # NOTE(review): outside a class body this sets a literal '__hash'
            # attribute, not Block's mangled '_Block__hash' — likely a no-op;
            # confirm against the Block class internals.
            block.__hash = None

            # reset stream
            reader.stream.Cleanup()

    print("Wrote blocks. Now writing headers")

    chain = Blockchain.Default()

    # reset header hash list
    chain._db.delete(DBPrefix.IX_HeaderHashList)

    total = len(header_hash_list)
    chain._header_index = header_hash_list

    print("storing header hash list...")

    # Flush the collected header hashes in pages of 2000, mirroring OnAddHeader.
    while total - 2000 >= chain._stored_header_count:
        ms = StreamManager.GetStream()
        w = BinaryWriter(ms)
        headers_to_write = chain._header_index[chain._stored_header_count:chain._stored_header_count + 2000]
        w.Write2000256List(headers_to_write)
        out = ms.ToArray()
        StreamManager.ReleaseStream(ms)
        with chain._db.write_batch() as wb:
            wb.put(DBPrefix.IX_HeaderHashList + chain._stored_header_count.to_bytes(4, 'little'), out)
        chain._stored_header_count += 2000

    last_index = len(header_hash_list)
    chain._db.put(DBPrefix.SYS_CurrentHeader, header_hash_list[-1] + last_index.to_bytes(4, 'little'))

    print("Imported %s blocks to %s " % (total_blocks, target_dir))
def AddSignature(self, contract, pubkey, signature):
    """Attach a signature to the context item for `contract`.

    For multi-sig contracts, parses the public keys out of the verification
    script, collects signatures until all required ones are present, then
    places them into the contract parameters in key order. For single-sig
    contracts, adds the signature at the (single) Signature parameter slot.

    Returns:
        bool: True if the signature was accepted; False otherwise.

    Raises:
        Exception: on an invalid Add operation, or if the Signature parameter
            is not first in a single-sig parameter list.
    """
    if contract.Type == ContractType.MultiSigContract:
        item = self.CreateItem(contract)

        if item is None:
            return False

        # All parameter slots must still be empty before collecting signatures.
        for p in item.ContractParameters:
            if p.Value is not None:
                return False

        if item.Signatures is None:
            item.Signatures = {}
        elif pubkey.encode_point(True) in item.Signatures:
            # This key has already signed.
            return False

        # Parse the public keys embedded in the multi-sig verification script.
        ecdsa = ECDSA.secp256r1()
        points = []
        # NOTE(review): `temp` is unused; kept to avoid any behavior change here.
        temp = binascii.unhexlify(contract.Script)
        ms = StreamManager.GetStream(binascii.unhexlify(contract.Script))
        reader = BinaryReader(ms)
        # This read advances the stream past the leading opcode; the value
        # itself is intentionally unused.
        numr = reader.ReadUInt8()
        try:
            # Each key is pushed as 33 bytes, prefixed by a 0x21 (33) length byte.
            while reader.ReadUInt8() == 33:
                ecpoint = ecdsa.ec.decode_from_hex(
                    binascii.hexlify(reader.ReadBytes(33)).decode())
                points.append(ecpoint)
        except ValueError:
            return False
        finally:
            StreamManager.ReleaseStream(ms)

        if pubkey not in points:
            # The signer's key is not part of this multi-sig contract.
            return False

        item.Signatures[pubkey.encode_point(
            True).decode()] = binascii.hexlify(signature)

        if len(item.Signatures) == len(contract.ParameterList):
            # All required signatures collected: place them in key order.
            i = 0
            points.sort(reverse=True)
            for k in points:
                pubkey = k.encode_point(True).decode()
                if pubkey in item.Signatures:
                    if self.Add(contract, i, item.Signatures[pubkey]) is None:
                        raise Exception("Invalid operation")
                    i += 1
            item.Signatures = None

        return True
    else:
        # Single-signature contract: find the (unique) Signature parameter slot.
        index = -1
        if contract.ParameterList == '00':
            contract.ParameterList = b'\x00'
        length = len(contract.ParameterList)
        for i in range(0, length):
            if ContractParameterType(contract.ParameterList[i]
                                     ) == ContractParameterType.Signature:
                if index >= 0:
                    raise Exception("Signature must be first")
                else:
                    index = i
        return self.Add(contract, index, signature)
def __init__(self, path):
    """Open (or create) the LevelDB-backed blockchain at `path` and rebuild the in-memory header index.

    Args:
        path (str): filesystem path of the LevelDB database directory.

    Raises:
        Exception: if the LevelDB database cannot be opened.
    """
    super(LevelDBBlockchain, self).__init__()
    self._path = path

    self._header_index = []
    self._header_index.append(
        Blockchain.GenesisBlock().Header.Hash.ToBytes())

    try:
        self._db = plyvel.DB(self._path, create_if_missing=True)
        # self._db = plyvel.DB(self._path, create_if_missing=True, bloom_filter_bits=16, compression=None)
    except Exception as e:
        logger.info(
            "leveldb unavailable, you may already be running this process: %s " % e)
        raise Exception('Leveldb Unavailable')

    version = self._db.get(DBPrefix.SYS_Version)

    if version == self._sysversion:  # or in the future, if version doesn't equal the current version...
        # The last 4 bytes of each SYS_* value hold a little-endian height.
        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentBlock, 0))
        self._current_block_height = int.from_bytes(ba[-4:], 'little')

        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentHeader, 0))
        current_header_height = int.from_bytes(ba[-4:], 'little')
        current_header_hash = bytes(ba[:64].decode('utf-8'), encoding='utf-8')

        # logger.info("current header hash!! %s " % current_header_hash)
        # logger.info("current header height, hashes %s %s %s" %(self._current_block_height, self._header_index, current_header_height) )

        # Rebuild the header index from the stored 2000-hash pages.
        hashes = []
        try:
            for key, value in self._db.iterator(
                    prefix=DBPrefix.IX_HeaderHashList):
                ms = StreamManager.GetStream(value)
                reader = BinaryReader(ms)
                hlist = reader.Read2000256List()
                key = int.from_bytes(key[-4:], 'little')
                hashes.append({'k': key, 'v': hlist})
                StreamManager.ReleaseStream(ms)
                # hashes.append({'index':int.from_bytes(key, 'little'), 'hash':value})
        except Exception as e:
            logger.info("Could not get stored header hash list: %s " % e)

        if len(hashes):
            hashes.sort(key=lambda x: x['k'])
            genstr = Blockchain.GenesisBlock().Hash.ToBytes()
            for hlist in hashes:
                for hash in hlist['v']:
                    # Genesis is already in the index; count every stored hash.
                    if hash != genstr:
                        self._header_index.append(hash)
                    self._stored_header_count += 1

        if self._stored_header_count == 0:
            # No stored hash pages: recreate headers from the raw block records.
            headers = []
            for key, value in self._db.iterator(
                    prefix=DBPrefix.DATA_Block):
                # First 8 bytes of each block record are skipped before decoding.
                dbhash = bytearray(value)[8:]
                headers.append(
                    Header.FromTrimmedData(binascii.unhexlify(dbhash), 0))

            headers.sort(key=lambda h: h.Index)
            for h in headers:
                if h.Index > 0:
                    self._header_index.append(h.Hash.ToBytes())
        elif current_header_height > self._stored_header_count:
            # Walk back from the current header until we meet the last indexed
            # hash, then append the missing headers in order.
            try:
                hash = current_header_hash
                targethash = self._header_index[-1]

                newhashes = []
                while hash != targethash:
                    header = self.GetHeader(hash)
                    newhashes.insert(0, header)
                    hash = header.PrevHash.ToBytes()

                self.AddHeaders(newhashes)
            except Exception as e:
                # Best-effort catch-up; a failure just leaves the index short.
                pass
    else:
        # Schema mismatch: wipe every key and restart from the genesis block.
        with self._db.write_batch() as wb:
            for key, value in self._db.iterator():
                wb.delete(key)

        self.Persist(Blockchain.GenesisBlock())
        self._db.put(DBPrefix.SYS_Version, self._sysversion)
def main():
    """Import blocks from a chain dump file into a fresh LevelDB blockchain (always overwrites existing data)."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-m", "--mainnet", action="store_true", default=False,
                        help="use MainNet instead of the default TestNet")
    parser.add_argument("-c", "--config", action="store", help="Use a specific config file")

    # Where to store stuff
    parser.add_argument("--datadir", action="store",
                        help="Absolute path to use for database directories")

    parser.add_argument("-i", "--input", help="Where the input file lives")

    parser.add_argument("-t", "--totalblocks", help="Total blocks to import", type=int)

    parser.add_argument("-l", "--logevents", help="Log Smart Contract Events", default=False, action="store_true")

    args = parser.parse_args()

    if args.mainnet and args.config:
        print("Cannot use both --config and --mainnet parameters, please use only one.")
        exit(1)

    # Setting the datadir must come before setting the network, else the wrong path is checked at net setup.
    if args.datadir:
        settings.set_data_dir(args.datadir)

    # Setup depending on command line arguments. By default, the testnet settings are already loaded.
    if args.config:
        settings.setup(args.config)
    elif args.mainnet:
        settings.setup_mainnet()

    if args.logevents:
        settings.log_smart_contract_events = True

    if not args.input:
        raise Exception("Please specify an input path")
    file_path = args.input

    with open(file_path, 'rb') as file:

        total_blocks = int.from_bytes(file.read(4), 'little')

        target_dir = os.path.join(settings.DATA_DIR_PATH, settings.LEVELDB_PATH)
        notif_target_dir = os.path.join(settings.DATA_DIR_PATH, settings.NOTIFICATION_DB_PATH)

        print("Will import %s blocks to %s" % (total_blocks, target_dir))
        print("This will overwrite any data currently in %s and %s.\nType 'confirm' to continue" % (target_dir, notif_target_dir))

        confirm = prompt("[confirm]> ", is_password=False)
        if not confirm == 'confirm':
            print("Cancelled operation")
            return False

        try:
            if os.path.exists(target_dir):
                shutil.rmtree(target_dir)
            if os.path.exists(notif_target_dir):
                shutil.rmtree(notif_target_dir)
        except Exception as e:
            print("Could not remove existing data %s " % e)
            return False

        # Instantiate the blockchain and subscribe to notifications
        blockchain = LevelDBBlockchain(settings.chain_leveldb_path)
        Blockchain.RegisterBlockchain(blockchain)
        chain = Blockchain.Default()

        # Each record in the dump is a 4-byte little-endian length then the block bytes.
        for index in trange(total_blocks, desc='Importing Blocks', unit=' Block'):
            block_len = int.from_bytes(file.read(4), 'little')
            stream = StreamManager.GetStream(file.read(block_len))
            reader = BinaryReader(stream)
            block = Block()
            block.Deserialize(reader)
            StreamManager.ReleaseStream(stream)

            # Genesis (index 0) is created by the chain itself; skip it.
            if block.Index > 0:
                chain.AddBlockDirectly(block)

        # NOTE(review): redundant — the with-statement already closes the file.
        file.close()

    print("Imported %s blocks to %s " % (total_blocks, target_dir))
def __init__(self, path, skip_version_check=False):
    """
    Open (or create) the LevelDB-backed blockchain at `path` and rebuild the
    in-memory header index.

    Args:
        path (str): filesystem path of the LevelDB database directory.
        skip_version_check (bool): when True, stamp the current schema version
            instead of comparing against the stored one.

    Raises:
        Exception: if the database cannot be opened, or the schema changed and
            the user declined to erase it.
    """
    super(LevelDBBlockchain, self).__init__()
    self._path = path

    self._header_index = []
    self._header_index.append(Blockchain.GenesisBlock().Header.Hash.ToBytes())
    self.TXProcessed = 0
    # Fix: the original called `print(TXProcessed)` — an undefined name that
    # raises NameError — plus several bare debug prints; removed/demoted here.
    logger.debug("genesis block: %s", Blockchain.GenesisBlock())

    try:
        self._db = plyvel.DB(self._path, create_if_missing=True)
        # self._db = plyvel.DB(self._path, create_if_missing=True, bloom_filter_bits=16, compression=None)
        logger.info("Created Blockchain DB at %s " % self._path)
    except Exception as e:
        logger.info("leveldb unavailable, you may already be running this process: %s " % e)
        raise Exception('Leveldb Unavailable')

    version = self._db.get(DBPrefix.SYS_Version)
    logger.debug("stored db version: %s", version)

    if skip_version_check:
        self._db.put(DBPrefix.SYS_Version, self._sysversion)
        version = self._sysversion

    if version == self._sysversion:  # or in the future, if version doesn't equal the current version...
        # The last 4 bytes of each SYS_* value hold a little-endian height.
        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentBlock, 0))
        self._current_block_height = int.from_bytes(ba[-4:], 'little')
        logger.debug("current block height: %s", self._current_block_height)

        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentHeader, 0))
        current_header_height = int.from_bytes(ba[-4:], 'little')
        current_header_hash = bytes(ba[:64].decode('utf-8'), encoding='utf-8')

        # Rebuild the header index from the stored 2000-hash pages.
        hashes = []
        try:
            for key, value in self._db.iterator(prefix=DBPrefix.IX_HeaderHashList):
                ms = StreamManager.GetStream(value)
                reader = BinaryReader(ms)
                hlist = reader.Read2000256List()
                key = int.from_bytes(key[-4:], 'little')
                hashes.append({'k': key, 'v': hlist})
                StreamManager.ReleaseStream(ms)
        except Exception as e:
            logger.info("Could not get stored header hash list: %s " % e)
        logger.debug("stored header hash pages: %s", len(hashes))

        if len(hashes):
            hashes.sort(key=lambda x: x['k'])
            genstr = Blockchain.GenesisBlock().Hash.ToBytes()
            for hlist in hashes:
                for hash in hlist['v']:
                    # Genesis is already in the index; count every stored hash.
                    if hash != genstr:
                        self._header_index.append(hash)
                    self._stored_header_count += 1

        if self._stored_header_count == 0:
            # No stored hash pages: recreate headers from the raw block records.
            headers = []
            for key, value in self._db.iterator(prefix=DBPrefix.DATA_Block):
                dbhash = bytearray(value)[8:]
                headers.append(Header.FromTrimmedData(binascii.unhexlify(dbhash), 0))

            headers.sort(key=lambda h: h.Index)
            for h in headers:
                if h.Index > 0:
                    self._header_index.append(h.Hash.ToBytes())
        elif current_header_height > self._stored_header_count:
            # Walk back from the current header until we meet the last indexed
            # hash, then append the missing headers in order.
            try:
                hash = current_header_hash
                targethash = self._header_index[-1]
                newhashes = []
                while hash != targethash:
                    header = self.GetHeader(hash)
                    newhashes.insert(0, header)
                    hash = header.PrevHash.ToBytes()
                self.AddHeaders(newhashes)
            except Exception as e:
                # Best-effort catch-up; a failure just leaves the index short.
                pass
    elif version is None:
        # Fresh database: persist genesis and stamp the schema version.
        self.Persist(Blockchain.GenesisBlock())
        self._db.put(DBPrefix.SYS_Version, self._sysversion)
    else:
        logger.error("\n\n")
        logger.warning("Database schema has changed from %s to %s.\n" % (version, self._sysversion))
        logger.warning("You must either resync from scratch, or use the np-bootstrap command to bootstrap the chain.")
        res = prompt("Type 'continue' to erase your current database and sync from new. Otherwise this program will exit:\n> ")
        if res == 'continue':
            # Wipe every key and restart from the genesis block.
            with self._db.write_batch() as wb:
                for key, value in self._db.iterator():
                    wb.delete(key)
            self.Persist(Blockchain.GenesisBlock())
            self._db.put(DBPrefix.SYS_Version, self._sysversion)
        else:
            raise Exception("Database schema changed")
def __init__(self, db, skip_version_check=False, skip_header_check=False):
    """Initialize the blockchain on top of an abstracted DB handle and rebuild the in-memory header index.

    Args:
        db: database backend exposing get/write/getBatch/openIter.
        skip_version_check (bool): when True, stamp the current schema version
            instead of comparing against the stored one.
        skip_header_check (bool): when True, skip rebuilding the header index.

    Raises:
        Exception: if the schema changed and the user declined to erase the DB.
    """
    self._db = db
    self._header_index = []
    self._header_index.append(
        Blockchain.GenesisBlock().Header.Hash.ToBytes())
    self.TXProcessed = 0

    version = self._db.get(DBPrefix.SYS_Version)

    if skip_version_check:
        self._db.write(DBPrefix.SYS_Version, self._sysversion)
        version = self._sysversion

    if version == self._sysversion:  # or in the future, if version doesn't equal the current version...
        # The last 4 bytes of each SYS_* value hold a little-endian height.
        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentBlock, 0))
        self._current_block_height = int.from_bytes(ba[-4:], 'little')

        if not skip_header_check:
            ba = bytearray(self._db.get(DBPrefix.SYS_CurrentHeader, 0))
            current_header_height = int.from_bytes(ba[-4:], 'little')
            current_header_hash = bytes(ba[:64].decode('utf-8'), encoding='utf-8')

            # Rebuild the header index from the stored 2000-hash pages.
            hashes = []
            try:
                with self._db.openIter(
                        DBProperties(DBPrefix.IX_HeaderHashList)) as it:
                    for key, value in it:
                        ms = StreamManager.GetStream(value)
                        reader = BinaryReader(ms)
                        hlist = reader.Read2000256List()
                        key = int.from_bytes(key[-4:], 'little')
                        hashes.append({'k': key, 'v': hlist})
                        StreamManager.ReleaseStream(ms)
            except Exception as e:
                logger.info("Could not get stored header hash list: %s " % e)

            if len(hashes):
                hashes.sort(key=lambda x: x['k'])
                genstr = Blockchain.GenesisBlock().Hash.ToBytes()
                for hlist in hashes:
                    for hash in hlist['v']:
                        # Genesis is already in the index; count every stored hash.
                        if hash != genstr:
                            self._header_index.append(hash)
                        self._stored_header_count += 1

            if self._stored_header_count == 0:
                logger.info(
                    "Current stored headers empty, re-creating from stored blocks..."
                )
                headers = []
                logger.info('Recreate headers')
                with self._db.openIter(DBProperties(
                        DBPrefix.DATA_Block)) as it:
                    for key, value in it:
                        # First 8 bytes of each block record are skipped before decoding.
                        dbhash = bytearray(value)[8:]
                        headers.append(
                            Header.FromTrimmedData(
                                binascii.unhexlify(dbhash), 0))

                headers.sort(key=lambda h: h.Index)
                for h in headers:
                    if h.Index > 0:
                        self._header_index.append(h.Hash.ToBytes())

                if len(headers):
                    self.OnAddHeader(headers[-1])
            elif current_header_height > self._stored_header_count:
                # Walk back from the current header until we meet the last
                # indexed hash, then append the missing headers in order.
                try:
                    hash = current_header_hash
                    targethash = self._header_index[-1]

                    newhashes = []
                    while hash != targethash:
                        header = self.GetHeader(hash)
                        newhashes.insert(0, header)
                        hash = header.PrevHash.ToBytes()

                    self.AddHeaders(newhashes)
                except Exception as e:
                    # Best-effort catch-up; a failure just leaves the index short.
                    pass
    elif version is None:
        # Fresh database: persist genesis and stamp the schema version.
        wait_for(self.Persist(Blockchain.GenesisBlock()))
        self._db.write(DBPrefix.SYS_Version, self._sysversion)
    else:
        logger.error("\n\n")
        logger.warning("Database schema has changed from %s to %s.\n" %
                       (version, self._sysversion))
        logger.warning(
            "You must either resync from scratch, or use the np-bootstrap command to bootstrap the chain."
        )
        res = prompt(
            "Type 'continue' to erase your current database and sync from new. Otherwise this program will exit:\n> "
        )
        if res == 'continue':
            # Wipe every key and restart from the genesis block.
            with self._db.getBatch() as wb:
                with self._db.openIter(
                        DBProperties(include_value=False)) as it:
                    for key in it:
                        wb.delete(key)

            wait_for(self.Persist(Blockchain.GenesisBlock()))
            self._db.write(DBPrefix.SYS_Version, self._sysversion)
        else:
            raise Exception("Database schema changed")