def OnAddHeader(self, header):
    hHash = header.Hash.ToBytes()

    if hHash not in self._header_index:
        self._header_index.append(hHash)

    # persist header hashes to the DB in batches of 2000
    with self._db.write_batch() as wb:
        while header.Index - 2000 >= self._stored_header_count:
            ms = StreamManager.GetStream()
            w = BinaryWriter(ms)
            headers_to_write = self._header_index[self._stored_header_count:self._stored_header_count + 2000]
            w.Write2000256List(headers_to_write)
            out = ms.ToArray()
            StreamManager.ReleaseStream(ms)
            wb.put(DBPrefix.IX_HeaderHashList + self._stored_header_count.to_bytes(4, 'little'), out)
            self._stored_header_count += 2000

    # store the header itself (if new) and update the current-header pointer
    with self._db.write_batch() as wb:
        if self._db.get(DBPrefix.DATA_Block + hHash) is None:
            wb.put(DBPrefix.DATA_Block + hHash, bytes(8) + header.ToArray())
        wb.put(DBPrefix.SYS_CurrentHeader, hHash + header.Index.to_bytes(4, 'little'))
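# For illustration only: a minimal sketch of how one stored 2000-hash batch could be
# read back. The helper below is hypothetical (not part of the class above) and assumes
# BinaryReader exposes Read2000256List as the counterpart to the Write2000256List call
# used in OnAddHeader.
def read_header_hash_batch(db, stored_count):
    value = db.get(DBPrefix.IX_HeaderHashList + stored_count.to_bytes(4, 'little'))
    ms = StreamManager.GetStream(value)
    reader = BinaryReader(ms)
    hashes = reader.Read2000256List()  # list of 2000 32-byte header hashes
    StreamManager.ReleaseStream(ms)
    return hashes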
def Serialize(self, writer: BinaryWriter):
    """
    Serialize full object.

    Args:
        writer (neo.IO.BinaryWriter):
    """
    super(ValidatorState, self).Serialize(writer)
    self.PublicKey.Serialize(writer)
    writer.WriteBool(self.Registered)
    writer.WriteFixed8(self.Votes)
def test_serialize(self):
    data = b'abc'

    stream = BytesIO()
    u1 = UIntBase(3, bytearray(data))
    u1.Serialize(BinaryWriter(stream))
    self.assertEqual(stream.getvalue(), data)

    stream = BytesIO()
    u1 = UIntBase(3, data)
    u1.Serialize(BinaryWriter(stream))
    self.assertEqual(stream.getvalue(), data)
def ToByteArray(self):
    stream = StreamManager.GetStream()
    writer = BinaryWriter(stream)
    self.Serialize(writer)
    out = stream.getvalue()
    StreamManager.ReleaseStream(stream)
    return out
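# Hedged counterpart sketch: rebuilding an object from the bytes produced by ToByteArray,
# using the StreamManager/BinaryReader pattern seen elsewhere in this section.
# `from_byte_array` is a hypothetical helper; it assumes `cls` has a no-argument
# constructor and a Deserialize(reader) method.
def from_byte_array(cls, data):
    ms = StreamManager.GetStream(data)
    reader = BinaryReader(ms)
    obj = cls()
    obj.Deserialize(reader)
    StreamManager.ReleaseStream(ms)
    return obj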
def test_network_addrtime(self):
    addr = "55.15.69.104"
    port = 10333
    ts = int(datetime.now().timestamp())
    services = 0

    nawt = NetworkAddressWithTime(addr, port, services, ts)

    ms = StreamManager.GetStream()
    writer = BinaryWriter(ms)
    nawt.Serialize(writer)
    arr = ms.ToArray()
    arhex = binascii.unhexlify(arr)
    StreamManager.ReleaseStream(ms)

    ms = StreamManager.GetStream(arhex)
    reader = BinaryReader(ms)
    nawt2 = NetworkAddressWithTime()
    nawt2.Deserialize(reader)
    StreamManager.ReleaseStream(ms)

    # self.assertEqual(nawt.Address, nawt2.Address)
    self.assertEqual(nawt.Services, nawt2.Services)
    self.assertEqual(nawt.Port, nawt2.Port)
    self.assertEqual(nawt.Timestamp, nawt2.Timestamp)
def test_2_serialize_single_notify_payload(self):
    sc = NotifyEvent(SmartContractEvent.RUNTIME_NOTIFY,
                     ContractParameter(ContractParameterType.Array, [ContractParameter(ContractParameterType.String, b'hello')]),
                     self.contract_hash, 99, self.event_tx, True, False)

    stream = StreamManager.GetStream()
    writer = BinaryWriter(stream)
    sc.Serialize(writer)
    out = bytes(stream.getvalue())

    self.assertEqual(out, b'\x1cSmartContract.Runtime.Notify\x11\xc4\xd1\xf4\xfb\xa6\x19\xf2b\x88p\xd3n:\x97s\xe8tp[c\x00\x00\x00\x90\xe4\xf1\xbbb\x8e\xf1\x07\xde\xe9\xf0\xd2\x12\xd1w\xbco\x844\x07=\x1b\xa7\x1f\xa7\x94`\x0b\xb4\x88|K\x05hello')

    StreamManager.ReleaseStream(stream)

    new_event = SmartContractEvent.FromByteArray(out)
    self.assertEqual(new_event.event_type, sc.event_type)
    self.assertEqual(new_event.contract_hash, sc.contract_hash)
    self.assertEqual(new_event.test_mode, sc.test_mode)
    self.assertEqual(new_event.tx_hash, sc.tx_hash)
    self.assertEqual(new_event.block_number, sc.block_number)
    self.assertEqual(new_event.notify_type, b'hello')
    self.assertEqual(new_event.AddressFrom, None)
    self.assertEqual(new_event.AddressTo, None)
    self.assertEqual(new_event.Amount, 0)
    self.assertEqual(new_event.is_standard_notify, False)
def test_tx_deserialize(self):
    ms = MemoryStream(self.tx_raw_hex)
    reader = BinaryReader(ms)

    tx = Transaction.DeserializeFrom(reader)

    self.assertEqual(type(tx), MinerTransaction)
    self.assertEqual(tx.Hash.ToBytes(), self.tx_id)
    self.assertEqual(tx.Nonce, self.tx_nonce)
    self.assertEqual(tx.inputs, [])
    self.assertEqual(tx.outputs, [])
    self.assertEqual(tx.scripts, [])

    ms = MemoryStream()
    writer = BinaryWriter(ms)
    tx.Serialize(writer)
    out = ms.ToArray()

    self.assertEqual(out, self.tx_raw)

    json = tx.ToJson()
    self.assertEqual(json['nonce'], self.tx_nonce)
def test_3_serialize_single_transfer_notify_payload(self):
    sc = NotifyEvent(SmartContractEvent.RUNTIME_NOTIFY,
                     ContractParameter(ContractParameterType.Array, [ContractParameter(ContractParameterType.String, b'transfer')]),
                     self.contract_hash, 99, self.event_tx, True, False)

    stream = StreamManager.GetStream()
    writer = BinaryWriter(stream)
    sc.Serialize(writer)
    out = bytes(stream.getvalue())
    StreamManager.ReleaseStream(stream)

    new_event = SmartContractEvent.FromByteArray(out)
    self.assertEqual(new_event.event_type, sc.event_type)
    self.assertEqual(new_event.contract_hash, sc.contract_hash)
    self.assertEqual(new_event.test_mode, sc.test_mode)
    self.assertEqual(new_event.tx_hash, sc.tx_hash)
    self.assertEqual(new_event.block_number, sc.block_number)
    self.assertEqual(new_event.notify_type, b'transfer')
    self.assertEqual(new_event.AddressFrom, None)
    self.assertEqual(new_event.AddressTo, None)
    self.assertEqual(new_event.Amount, 0)
    self.assertEqual(new_event.is_standard_notify, False)
    self.assertEqual(new_event.ShouldPersist, False)
def test_6_serialize_full_approve_payload(self):
    payload = ContractParameter(ContractParameterType.Array, [
        ContractParameter(ContractParameterType.String, b'approve'),
        ContractParameter(ContractParameterType.ByteArray, self.addr_to),
        ContractParameter(ContractParameterType.ByteArray, self.addr_from),
        ContractParameter(ContractParameterType.ByteArray, b'x\xe0\x01')
    ])

    sc = NotifyEvent(SmartContractEvent.RUNTIME_NOTIFY, payload,
                     self.contract_hash, 91349, self.event_tx, True, False)

    stream = StreamManager.GetStream()
    writer = BinaryWriter(stream)
    sc.Serialize(writer)
    out = bytes(stream.getvalue())
    StreamManager.ReleaseStream(stream)

    new_event = SmartContractEvent.FromByteArray(out)
    self.assertEqual(new_event.event_type, sc.event_type)
    self.assertEqual(new_event.contract_hash, sc.contract_hash)
    self.assertEqual(new_event.test_mode, sc.test_mode)
    self.assertEqual(new_event.tx_hash, sc.tx_hash)
    self.assertEqual(new_event.block_number, sc.block_number)
    self.assertEqual(new_event.notify_type, b'approve')
    self.assertEqual(new_event.AddressFrom, 'AKZmSGPD7ytJBbxpRPmobYGLNxdWH3Jiqs')
    self.assertEqual(new_event.AddressTo, 'ALb8FEhEmtSqv97fuNVuoLmcmrSKckffRf')
    self.assertEqual(new_event.Amount, 123000)
    self.assertEqual(new_event.is_standard_notify, True)
    self.assertEqual(new_event.ShouldPersist, True)
def test_5_serialize_full_refund_payload(self):
    payload = ContractParameter(ContractParameterType.Array, [
        ContractParameter(ContractParameterType.String, b'refund'),
        ContractParameter(ContractParameterType.ByteArray, self.addr_to),
        ContractParameter(ContractParameterType.Integer, 123000)
    ])

    sc = NotifyEvent(SmartContractEvent.RUNTIME_NOTIFY, payload,
                     self.contract_hash, 91349, self.event_tx, True, False)

    stream = StreamManager.GetStream()
    writer = BinaryWriter(stream)
    sc.Serialize(writer)
    out = bytes(stream.getvalue())
    StreamManager.ReleaseStream(stream)

    new_event = SmartContractEvent.FromByteArray(out)
    self.assertEqual(new_event.event_type, sc.event_type)
    self.assertEqual(new_event.contract_hash, sc.contract_hash)
    self.assertEqual(new_event.test_mode, sc.test_mode)
    self.assertEqual(new_event.tx_hash, sc.tx_hash)
    self.assertEqual(new_event.block_number, sc.block_number)
    self.assertEqual(new_event.notify_type, b'refund')
    self.assertEqual(new_event.AddressTo, 'AKZmSGPD7ytJBbxpRPmobYGLNxdWH3Jiqs')
    self.assertEqual(new_event.addr_from, sc.contract_hash)
    self.assertEqual(new_event.Amount, 123000)
    self.assertEqual(new_event.is_standard_notify, True)
def Serialize(self, writer: BinaryWriter):
    """
    Serialize full object.

    Args:
        writer (neo.IO.BinaryWriter):
    """
    byt = None
    if self.Type == StateType.Account:
        byt = b'\x40'
    elif self.Type == StateType.Validator:
        byt = b'\x48'
    writer.WriteByte(byt)
    writer.WriteVarBytes(self.Key)
    writer.WriteVarString(self.Field)
    writer.WriteVarBytes(self.Value)
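# A possible Deserialize mirroring the Serialize above (sketch only, not taken from the
# source). It assumes the usual BinaryReader counterparts ReadByte, ReadVarBytes and
# ReadVarString are available, and that StateType.Account/Validator correspond to the
# byte values 0x40/0x48 written by Serialize.
def Deserialize(self, reader: BinaryReader):
    self.Type = StateType(reader.ReadByte())
    self.Key = reader.ReadVarBytes()
    self.Field = reader.ReadVarString()
    self.Value = reader.ReadVarBytes()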
def UpdateInternal(self, key, value):
    if self.batch:
        stream = StreamManager.GetStream()
        bw = BinaryWriter(stream)
        value.Serialize(bw)
        self.batch.put(self.prefix + key, stream.ToArray())
        StreamManager.ReleaseStream(stream)
def Trim(self):
    """
    Returns a byte array that contains only the block header and transaction hashes.

    Returns:
        bytes:
    """
    ms = StreamManager.GetStream()
    writer = BinaryWriter(ms)

    self.SerializeUnsigned(writer)
    writer.WriteByte(1)
    self.Script.Serialize(writer)

    writer.WriteHashes([tx.Hash.ToBytes() for tx in self.Transactions])
    retVal = ms.ToArray()
    StreamManager.ReleaseStream(ms)
    return retVal
def Serialize(self, writer: BinaryWriter):
    """
    Serialize object.

    Args:
        writer (neo.IO.BinaryWriter):
    """
    writer.WriteSerializableArray(self.Headers)
def ToByteArray(self):
    """
    Serialize self and get the byte stream.

    Returns:
        bytes: serialized object.
    """
    ms = StreamManager.GetStream()
    writer = BinaryWriter(ms)
    self.Serialize(writer)
    retval = ms.ToArray()
    StreamManager.ReleaseStream(ms)
    return retval
def ToJson(self):
    jsn = {}
    jsn['type'] = 'Neo.Core.ContractTransaction'  # Verifiable.GetType().FullName
    ms = MemoryStream()
    w = BinaryWriter(ms)
    self.Verifiable.SerializeUnsigned(w)
    ms.flush()
    jsn['hex'] = ms.ToArray().decode()

    jsn['items'] = {}
    for key, value in self.ContextItems.items():
        if type(key) == str:
            shkey = "0x{}".format(key)
        else:
            shkey = "0x{}".format(key.decode())
        jsn['items'][shkey] = value.ToJson()

    return jsn
def GetHashData(hashable):
    """
    Get the data used for hashing.

    Args:
        hashable (neo.IO.Mixins.SerializableMixin): object extending SerializableMixin

    Returns:
        bytes:
    """
    ms = StreamManager.GetStream()
    writer = BinaryWriter(ms)
    hashable.SerializeUnsigned(writer)
    ms.flush()
    retVal = ms.ToArray()
    StreamManager.ReleaseStream(ms)
    return retVal
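# The unsigned bytes returned by GetHashData are what gets hashed to derive identifiers
# such as tx.Hash. A minimal illustration with the standard library (NEO uses a double
# SHA-256 digest); note that the hex displayed by the project is typically the reversed
# byte order of this raw digest.
import hashlib

def hash256(data: bytes) -> bytes:
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

# e.g. hash256(GetHashData(tx)) corresponds to the raw data behind tx.Hash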
def test_data_received(self, mock):
    node = NeoNode()
    node.endpoint = Endpoint('hello.com', 1234)
    node.host = node.endpoint.host
    node.port = node.endpoint.port

    payload = VersionPayload(10234, 1234, 'version')
    message = Message('version', payload=payload)

    stream = StreamManager.GetStream()
    writer = BinaryWriter(stream)
    message.Serialize(writer)
    out = stream.getvalue()

    node.dataReceived(out)

    mock.assert_called_once()
    self.assertEqual(node.Version.Nonce, payload.Nonce)
def ToStream(value):
    """
    Serialize the given `value` to an array of bytes.

    Args:
        value (neo.IO.Mixins.SerializableMixin): object extending SerializableMixin.

    Returns:
        bytes: not hexlified
    """
    ms = StreamManager.GetStream()
    writer = BinaryWriter(ms)

    value.Serialize(writer)

    retVal = ms.getvalue()
    StreamManager.ReleaseStream(ms)
    return retVal
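# Hypothetical round-trip helper pairing ToStream with the IOHelper.AsSerializableWithType
# call used in the message serialization test below. Sketch only: it assumes
# AsSerializableWithType accepts the raw (non-hexlified) bytes that ToStream returns.
def round_trip(value, class_path):
    raw = ToStream(value)
    return IOHelper.AsSerializableWithType(raw, class_path)

# e.g. round_trip(payload, 'neo.Network.Payloads.VersionPayload.VersionPayload')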
def test_1_serialize_runtime_log(self):
    sc = SmartContractEvent(SmartContractEvent.RUNTIME_LOG, ContractParameter(ContractParameterType.Array, []),
                            self.contract_hash, 99999, self.event_tx, True, False)

    stream = StreamManager.GetStream()
    writer = BinaryWriter(stream)
    sc.Serialize(writer)
    out = bytes(stream.getvalue())

    self.assertEqual(out, b'\x19SmartContract.Runtime.Log\x11\xc4\xd1\xf4\xfb\xa6\x19\xf2b\x88p\xd3n:\x97s\xe8tp[\x9f\x86\x01\x00\x90\xe4\xf1\xbbb\x8e\xf1\x07\xde\xe9\xf0\xd2\x12\xd1w\xbco\x844\x07=\x1b\xa7\x1f\xa7\x94`\x0b\xb4\x88|K')

    StreamManager.ReleaseStream(stream)

    new_event = SmartContractEvent.FromByteArray(out)
    self.assertEqual(new_event.event_type, sc.event_type)
    self.assertEqual(new_event.contract_hash, sc.contract_hash)
    self.assertEqual(new_event.test_mode, sc.test_mode)
    self.assertEqual(new_event.tx_hash, sc.tx_hash)
    self.assertEqual(new_event.block_number, sc.block_number)
def test_account_state(self):
    hash = UInt160(data=self.shash)
    account = AccountState(script_hash=hash)

    addr = account.Address

    self.assertEqual(addr, self.saddr)

    input = binascii.unhexlify(self.assset)
    asset = AssetState.DeserializeFromDB(input)

    account.AddToBalance(asset.AssetId, Fixed8(2440000000))
    self.assertEqual(account.BalanceFor(asset.AssetId), Fixed8(2440000000))

    account.SubtractFromBalance(asset.AssetId, Fixed8(1220000000))
    self.assertEqual(account.BalanceFor(asset.AssetId), Fixed8(1220000000))

    self.assertEqual(account.HasBalance(asset.AssetId), True)

    sshare_hash = Blockchain.SystemShare().Hash
    account.SubtractFromBalance(sshare_hash, Fixed8(800000000))

    self.assertFalse(account.AllBalancesZeroOrLess())

    acct_json = account.ToJson()

    stream = StreamManager.GetStream()
    writer = BinaryWriter(stream)
    account.Serialize(writer)
    out = stream.ToArray()
    StreamManager.ReleaseStream(stream)

    input = binascii.unhexlify(out)
    newaccount = AccountState.DeserializeFromDB(input)

    self.assertEqual(acct_json, newaccount.ToJson())
def Runtime_Serialize(self, engine: ExecutionEngine):
    stack_item = engine.CurrentContext.EvaluationStack.Pop()

    ms = StreamManager.GetStream()
    writer = BinaryWriter(ms)
    try:
        stack_item.Serialize(writer)
    except Exception as e:
        logger.error("Cannot serialize item %s: %s " % (stack_item, e))
        return False

    ms.flush()

    # enforce the VM's maximum item size before pushing the result
    if ms.tell() > engine.maxItemSize:
        return False

    retVal = ByteArray(ms.getvalue())
    StreamManager.ReleaseStream(ms)
    engine.CurrentContext.EvaluationStack.PushT(retVal)

    return True
def test_handle_message(self, mock):
    node = NeoNode()
    node.endpoint = Endpoint('hello.com', 1234)
    node.host = node.endpoint.host
    node.port = node.endpoint.port

    payload = VersionPayload(10234, 1234, 'version')
    message = Message('version', payload=payload)

    stream = StreamManager.GetStream()
    writer = BinaryWriter(stream)
    message.Serialize(writer)
    out = stream.getvalue()
    print("OUT %s " % out)

    out1 = out[0:10]
    out2 = out[10:20]
    out3 = out[20:]

    node.dataReceived(out1)
    node.dataReceived(out2)

    self.assertEqual(node.buffer_in, out1 + out2)
    # import pdb
    # pdb.set_trace()
    self.assertEqual(node.bytes_in, 20)

    mock.assert_not_called()

    node.dataReceived(out3)

    self.assertEqual(node.bytes_in, len(out))

    # mock.assert_called_with(message)
    mock.assert_called_once()
def test_message_serialization(self):
    message = Message('version', payload=self.payload)
    self.assertEqual(message.Command, 'version')

    ms = StreamManager.GetStream()
    writer = BinaryWriter(ms)
    message.Serialize(writer)
    result = binascii.unhexlify(ms.ToArray())
    StreamManager.ReleaseStream(ms)

    ms = StreamManager.GetStream(result)
    reader = BinaryReader(ms)
    deserialized_message = Message()
    deserialized_message.Deserialize(reader)
    StreamManager.ReleaseStream(ms)

    dm = deserialized_message

    self.assertEqual(dm.Command, 'version')
    self.assertEqual(dm.Magic, settings.MAGIC)

    checksum = Message.GetChecksum(dm.Payload)
    self.assertEqual(checksum, dm.Checksum)

    deserialized_version = IOHelper.AsSerializableWithType(
        dm.Payload, 'neo.Network.Payloads.VersionPayload.VersionPayload')

    self.assertEqual(deserialized_version.Port, self.port)
    self.assertEqual(deserialized_version.UserAgent, self.ua)
    self.assertEqual(deserialized_version.Timestamp, self.payload.Timestamp)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-m", "--mainnet", action="store_true", default=False,
                        help="use MainNet instead of the default TestNet")
    parser.add_argument("-c", "--config", action="store", help="Use a specific config file")

    # Where to store stuff
    parser.add_argument("--datadir", action="store",
                        help="Absolute path to use for database directories")

    parser.add_argument("-i", "--input", help="Where the input file lives")

    parser.add_argument("-t", "--totalblocks", help="Total blocks to import", type=int)

    parser.add_argument("-l", "--logevents", help="Log Smart Contract Events", default=False, action="store_true")

    parser.add_argument("-n", "--notifications", help="Persist Notifications to database", default=False, action="store_true")

    parser.add_argument("-a", "--append", action="store_true", default=False, help="Append to current Block database")

    args = parser.parse_args()

    if args.mainnet and args.config:
        print("Cannot use both --config and --mainnet parameters, please use only one.")
        exit(1)

    # Setting the datadir must come before setting the network, else the wrong path is checked at net setup.
    if args.datadir:
        settings.set_data_dir(args.datadir)

    # Setup depending on command line arguments. By default, the testnet settings are already loaded.
    if args.config:
        settings.setup(args.config)
    elif args.mainnet:
        settings.setup_mainnet()

    if args.logevents:
        settings.log_smart_contract_events = True

    if not args.input:
        raise Exception("Please specify an input path")

    file_path = args.input

    append = False
    store_notifications = False

    start_block = 0

    if args.append:
        append = True

    if args.notifications:
        store_notifications = True

    header_hash_list = []

    with open(file_path, 'rb') as file_input:

        total_blocks_available = int.from_bytes(file_input.read(4), 'little')

        if total_blocks_available == 0:
            total_blocks_available = int.from_bytes(file_input.read(4), 'little')

        total_blocks = total_blocks_available
        if args.totalblocks and args.totalblocks < total_blocks and args.totalblocks > 0:
            total_blocks = args.totalblocks

        target_dir = os.path.join(settings.DATA_DIR_PATH, settings.LEVELDB_PATH)
        notif_target_dir = os.path.join(settings.DATA_DIR_PATH, settings.NOTIFICATION_DB_PATH)

        if append:
            blockchain = LevelDBBlockchain(settings.chain_leveldb_path, skip_header_check=True)
            Blockchain.RegisterBlockchain(blockchain)

            start_block = Blockchain.Default().Height
            print("Starting import at %s " % start_block)
        else:
            print("Will import %s of %s blocks to %s" % (total_blocks, total_blocks_available, target_dir))
            print("This will overwrite any data currently in %s and %s.\nType 'confirm' to continue" % (target_dir, notif_target_dir))

            try:
                confirm = prompt("[confirm]> ", is_password=False)
            except KeyboardInterrupt:
                confirm = False
            if not confirm == 'confirm':
                print("Cancelled operation")
                return False

            try:
                if os.path.exists(target_dir):
                    shutil.rmtree(target_dir)
                if os.path.exists(notif_target_dir):
                    shutil.rmtree(notif_target_dir)
            except Exception as e:
                print("Could not remove existing data %s " % e)
                return False

            # Instantiate the blockchain and subscribe to notifications
            blockchain = LevelDBBlockchain(settings.chain_leveldb_path)
            Blockchain.RegisterBlockchain(blockchain)

        chain = Blockchain.Default()

        if store_notifications:
            NotificationDB.instance().start()

        stream = MemoryStream()
        reader = BinaryReader(stream)
        block = Block()
        length_ba = bytearray(4)

        for index in trange(total_blocks, desc='Importing Blocks', unit=' Block'):

            # set stream data
            file_input.readinto(length_ba)
            block_len = int.from_bytes(length_ba, 'little')

            reader.stream.write(file_input.read(block_len))
            reader.stream.seek(0)

            # get block
            block.DeserializeForImport(reader)
            header_hash_list.append(block.Hash.ToBytes())

            # add
            if block.Index > start_block:
                chain.AddBlockDirectly(block, do_persist_complete=store_notifications)

            # reset blockheader
            block._header = None
            block.__hash = None

            # reset stream
            reader.stream.Cleanup()

    print("Wrote blocks. Now writing headers")

    chain = Blockchain.Default()

    # reset header hash list
    chain._db.delete(DBPrefix.IX_HeaderHashList)

    total = len(header_hash_list)

    chain._header_index = header_hash_list

    print("storing header hash list...")

    while total - 2000 >= chain._stored_header_count:
        ms = StreamManager.GetStream()
        w = BinaryWriter(ms)
        headers_to_write = chain._header_index[chain._stored_header_count:chain._stored_header_count + 2000]
        w.Write2000256List(headers_to_write)
        out = ms.ToArray()
        StreamManager.ReleaseStream(ms)
        with chain._db.write_batch() as wb:
            wb.put(DBPrefix.IX_HeaderHashList + chain._stored_header_count.to_bytes(4, 'little'), out)

        chain._stored_header_count += 2000

    last_index = len(header_hash_list)
    chain._db.put(DBPrefix.SYS_CurrentHeader, header_hash_list[-1] + last_index.to_bytes(4, 'little'))

    print("Imported %s blocks to %s " % (total_blocks, target_dir))