def search_txhash(self, txhash):  # txhash is unique due to nonce.
    """Look up a transaction by hash, first in the pool, then the local chain.

    :param txhash: transaction hash to search for
    :return: telnet-formatted json of the transaction, or an error dict
    """
    err = {'status': 'Error',
           'error': 'txhash not found',
           'method': 'txhash',
           'parameter': txhash}

    # Unconfirmed transactions live in the pool; check there first.
    for tx in self.chain.transaction_pool:
        if tx.txhash == txhash:
            logger.info('%s found in transaction pool..', txhash)
            tx_new = copy.deepcopy(tx)
            self.reformat_txn(tx_new)
            return json_print_telnet(tx_new)

    try:
        txn_metadata = self.chain.state.db.get(txhash)
    except Exception:  # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
        logger.info('%s does not exist in memory pool or local blockchain..', txhash)
        return json_print_telnet(err)

    # metadata layout: [json_tx, blocknumber, timestamp]
    json_tx = json.loads(txn_metadata[0])
    tx = Transaction().from_txdict(json_tx)

    tx.blocknumber = txn_metadata[1]
    tx.confirmations = self.chain.height() - tx.blocknumber
    tx.timestamp = txn_metadata[2]

    tx_new = copy.deepcopy(tx)
    self.reformat_txn(tx_new)
    logger.info('%s found in block %s', txhash, str(txn_metadata[1]))
    tx_new.status = 'ok'
    return json_print_telnet(tx_new)
def validate_block(self, block, address_txn) -> bool:
    """Validate ``block`` and apply its transactions onto ``address_txn``.

    Order matters: the coinbase is validated/applied first, then each
    remaining transaction is validated against the progressively mutated
    ``address_txn`` state. Returns False on the first failure.
    """
    len_transactions = len(block.transactions)

    if len_transactions < 1:
        return False

    coinbase_tx = Transaction.from_pbdata(block.transactions[0])

    # The first transaction must be the (only) CoinBase.
    if not isinstance(coinbase_tx, CoinBase):
        return False

    if not coinbase_tx.validate_extended():
        return False

    if not PoWValidator().validate_mining_nonce(self.state, block.blockheader):
        return False

    coinbase_tx.apply_on_state(address_txn)

    # TODO: check block reward must be equal to coinbase amount

    for tx_idx in range(1, len_transactions):
        tx = Transaction.from_pbdata(block.transactions[tx_idx])

        # No second coinbase allowed.
        if isinstance(tx, CoinBase):
            return False

        if not tx.validate():  # TODO: Move this validation, before adding txn to pool
            return False

        addr_from_pk_state = address_txn[tx.addr_from]
        # Slave transactions sign with the slave key; nonce/OTS checks
        # then run against the slave's address state instead.
        addr_from_pk = Transaction.get_slave(tx)
        if addr_from_pk:
            addr_from_pk_state = address_txn[addr_from_pk]

        if not tx.validate_extended(address_txn[tx.addr_from], addr_from_pk_state):
            return False

        expected_nonce = addr_from_pk_state.nonce + 1

        if tx.nonce != expected_nonce:
            logger.warning('nonce incorrect, invalid tx')
            logger.warning('subtype: %s', tx.type)
            logger.warning('%s actual: %s expected: %s', tx.addr_from, tx.nonce, expected_nonce)
            return False

        if addr_from_pk_state.ots_key_reuse(tx.ots_key):
            logger.warning('pubkey reuse detected: invalid tx %s', tx.txhash)
            logger.warning('subtype: %s', tx.type)
            return False

        tx.apply_on_state(address_txn)

    return True
def test_calc_allowed_decimals(self):
    """calc_allowed_decimals: larger supplies allow fewer decimal places."""
    cases = [
        (10000000000000000000, 0),
        (1, 19),
        (2, 18),
    ]
    for value, expected in cases:
        decimal = Transaction.calc_allowed_decimals(value)
        self.assertEqual(decimal, expected)
def load(self, genesis_block):
    """Bootstrap chain state from the database.

    If the DB is empty (height == -1) the genesis block is written along
    with its number mapping, difficulty metadata, genesis balances and
    genesis transactions; otherwise the last block and its difficulty
    are restored from storage.
    """
    height = self.state.get_mainchain_height()

    if height == -1:
        self.state.put_block(genesis_block, None)
        block_number_mapping = qrl_pb2.BlockNumberMapping(headerhash=genesis_block.headerhash,
                                                          prev_headerhash=genesis_block.prev_headerhash)
        self.state.put_block_number_mapping(genesis_block.block_number, block_number_mapping, None)
        parent_difficulty = StringToUInt256(str(config.dev.genesis_difficulty))

        self.current_difficulty, _ = DifficultyTracker.get(
            measurement=config.dev.mining_setpoint_blocktime,
            parent_difficulty=parent_difficulty)

        block_metadata = BlockMetadata.create()
        block_metadata.set_orphan(False)
        block_metadata.set_block_difficulty(self.current_difficulty)
        block_metadata.set_cumulative_difficulty(self.current_difficulty)

        self.state.put_block_metadata(genesis_block.headerhash, block_metadata, None)
        addresses_state = dict()

        # Seed the genesis balances before applying genesis transactions.
        for genesis_balance in GenesisBlock().genesis_balance:
            bytes_addr = genesis_balance.address
            addresses_state[bytes_addr] = AddressState.get_default(bytes_addr)
            addresses_state[bytes_addr]._data.balance = genesis_balance.balance

        # Pre-create default states for every destination address.
        for tx_idx in range(1, len(genesis_block.transactions)):
            tx = Transaction.from_pbdata(genesis_block.transactions[tx_idx])
            for addr in tx.addrs_to:
                addresses_state[addr] = AddressState.get_default(addr)

        coinbase_tx = Transaction.from_pbdata(genesis_block.transactions[0])

        if not isinstance(coinbase_tx, CoinBase):
            return False

        addresses_state[coinbase_tx.addr_to] = AddressState.get_default(coinbase_tx.addr_to)

        if not coinbase_tx.validate_extended():
            return False

        # Apply coinbase first, then the remaining genesis transactions.
        coinbase_tx.apply_on_state(addresses_state)

        for tx_idx in range(1, len(genesis_block.transactions)):
            tx = Transaction.from_pbdata(genesis_block.transactions[tx_idx])
            tx.apply_on_state(addresses_state)

        self.state.state_objects.update_current_state(addresses_state)
        self.state.state_objects.update_tx_metadata(genesis_block, None)
        self.state.state_objects.push(genesis_block.headerhash)
    else:
        # Database already populated: restore tip and difficulty.
        self.last_block = self.get_block_by_number(height)
        self.current_difficulty = self.state.get_block_metadata(self.last_block.headerhash).block_difficulty
def __next__(self):
    """Pop one pending transaction, validate it and move it to the pool.

    Raises StopIteration when nothing is pending or the pool is full;
    returns False on validation failure, True once pooled and broadcast.
    """
    if not self.transaction_pool_obj.pending_tx_pool:
        raise StopIteration

    if len(self.transaction_pool_obj.transaction_pool) >= config.dev.transaction_pool_size:
        raise StopIteration

    tx = self.transaction_pool_obj.pending_tx_pool.pop(0)
    tx = tx[0]  # pending entries are tuples; the transaction is the first element

    if not tx.validate():
        return False

    addr_from_state = self.state.get_address(address=tx.txfrom)
    addr_from_pk_state = addr_from_state
    # Slave transactions validate nonce/OTS against the slave's address state.
    addr_from_pk = Transaction.get_slave(tx)
    if addr_from_pk:
        addr_from_pk_state = self.state.get_address(address=addr_from_pk)

    is_valid_state = tx.validate_extended(addr_from_state=addr_from_state,
                                          addr_from_pk_state=addr_from_pk_state,
                                          transaction_pool=self.transaction_pool_obj.transaction_pool)
    is_valid_pool_state = tx.validate_transaction_pool(self.transaction_pool_obj.transaction_pool)

    if not (is_valid_state and is_valid_pool_state):
        logger.info('>>>TX %s failed state_validate', tx.txhash)
        return False

    logger.info('A TXN has been Processed %s', bin2hstr(tx.txhash))
    self.transaction_pool_obj.add_tx_to_pool(tx)
    self.broadcast_tx(tx)

    return True
def test_from_json(self):
    """A TransferTransaction deserialized from json re-signs correctly."""
    tx = Transaction.from_json(test_json_Simple)
    tx.sign(self.alice)
    self.assertIsInstance(tx, TransferTransaction)

    # Test that common Transaction components were copied over.
    self.assertEqual(0, tx.nonce)
    self.assertEqual('010300a1da274e68c88b0ccf448e0b1916fa789b01eb2ed4e9ad565ce264c9390782a9c61ac02f',
                     bin2hstr(tx.addr_from))
    self.assertEqual('01030038ea6375069f8272cc1a6601b3c76c21519455603d370036b97c779ada356'
                     '5854e3983bd564298c49ae2e7fa6e28d4b954d8cd59398f1225b08d6144854aee0e',
                     bin2hstr(tx.PK))
    self.assertEqual('554f546305d4aed6ec71c759942b721b904ab9d65eeac3c954c08c652181c4e8',
                     bin2hstr(tx.txhash))
    self.assertEqual(10, tx.ots_key)
    self.assertEqual(test_signature_Simple, bin2hstr(tx.signature))

    # Test that specific content was copied over.
    self.assertEqual('0103001d65d7e59aed5efbeae64246e0f3184d7c42411421eb385ba30f2c1c005a85ebc4419cfd',
                     bin2hstr(tx.addrs_to[0]))
    self.assertEqual(100, tx.total_amount)
    self.assertEqual(1, tx.fee)
def test_from_json(self):
    """A TokenTransaction deserialized from json re-signs correctly."""
    tx = Transaction.from_json(test_json_Token)
    tx.sign(self.alice)
    self.assertIsInstance(tx, TokenTransaction)

    # Test that common Transaction components were copied over.
    self.assertEqual('010300a1da274e68c88b0ccf448e0b1916fa789b01eb2ed4e9ad565ce264c9390782a9c61ac02f',
                     bin2hstr(tx.addr_from))
    self.assertEqual('01030038ea6375069f8272cc1a6601b3c76c21519455603d370036b97c779ada356'
                     '5854e3983bd564298c49ae2e7fa6e28d4b954d8cd59398f1225b08d6144854aee0e',
                     bin2hstr(tx.PK))
    self.assertEqual(b'QRL', tx.symbol)
    self.assertEqual(b'Quantum Resistant Ledger', tx.name)
    self.assertEqual('010317463dcd581b679b4754f46c6425125489a2826894e3c42a590efb6806450ce6bf52716c',
                     bin2hstr(tx.owner))
    self.assertEqual('ff84da605e9c9cd04d68503be7922110b4cc147837f8687ad18aa54b7bc5632d',
                     bin2hstr(tx.txhash))
    self.assertEqual(10, tx.ots_key)
    self.assertEqual(test_signature_Token, bin2hstr(tx.signature))

    # Sum of initial balances must equal the declared total supply.
    total_supply = 0
    for initial_balance in tx.initial_balances:
        total_supply += initial_balance.amount
    self.assertEqual(600000000, total_supply)

    self.assertEqual(1, tx.fee)
def test_from_json(self):
    """A TransferTokenTransaction deserialized from json re-signs correctly."""
    tx = Transaction.from_json(test_json_TransferToken)
    tx.sign(self.alice)
    self.assertIsInstance(tx, TransferTokenTransaction)

    # Test that common Transaction components were copied over.
    self.assertEqual('010300a1da274e68c88b0ccf448e0b1916fa789b01eb2ed4e9ad565ce264c9390782a9c61ac02f',
                     bin2hstr(tx.addr_from))
    self.assertEqual('01030038ea6375069f8272cc1a6601b3c76c21519455603d370036b97c779ada356'
                     '5854e3983bd564298c49ae2e7fa6e28d4b954d8cd59398f1225b08d6144854aee0e',
                     bin2hstr(tx.PK))
    self.assertEqual(b'000000000000000', tx.token_txhash)
    self.assertEqual(200000, tx.total_amount)
    self.assertEqual('390b159b34cffd29d4271a19679ff227df2ccd471078f177a7b58ca5f5d999f0',
                     bin2hstr(tx.txhash))
    self.assertEqual(10, tx.ots_key)

    # Helper snippet kept for regenerating the expected signature constant:
    # z = bin2hstr(tx.signature)
    # print('"', end='')
    # for i in range(len(z)):
    #     print(z[i], end='')
    #     if (i + 1) % 64 == 0:
    #         print('" \\', end='')
    #         print('')
    #         print(' ' * len('test_signature_TransferToken = '), end='')
    #         print('"', end='')

    self.assertEqual(test_signature_TransferToken, bin2hstr(tx.signature))
    self.assertEqual(1, tx.fee)
def last_unconfirmed_tx(self, data=None):
    """API: list unconfirmed transfer transactions from the pool.

    :param data: requested count as int/str (1-20); defaults to 1
    :return: telnet-formatted json with a 'transactions' list, or an error dict
    """
    logger.info('<<< API last_unconfirmed_tx call')

    addr = {'transactions': []}
    error = {'status': 'error', 'error': 'invalid argument', 'method': 'last_tx', 'parameter': data}

    if not data:
        data = 1

    try:
        n = int(data)
    except (TypeError, ValueError):  # narrowed from bare except: only conversion failures
        return json_print_telnet(error)

    if n <= 0 or n > 20:
        return json_print_telnet(error)

    # NOTE(review): 'n' is bounds-checked but the loop below does not cap
    # the output at n entries — confirm whether that is intentional.
    tx_num = len(self.factory.chain.transaction_pool)
    while tx_num > 0:
        tx_num -= 1
        tx = self.factory.chain.transaction_pool[tx_num]
        if tx.subtype != TX_SUBTYPE_TX:
            continue
        tmp_txn = {'txhash': bin2hstr(tx.txhash),
                   'block': 'unconfirmed',
                   'timestamp': 'unconfirmed',
                   'amount': self.factory.format_qrlamount(tx.amount),
                   'type': tx.subtype}
        tmp_txn['type'] = Transaction.tx_id_to_name(tmp_txn['type'])
        addr['transactions'].append(tmp_txn)

    addr['status'] = 'ok'
    return json_print_telnet(addr)
def test_from_json(self):
    """An unsigned TransferTokenTransaction deserializes correctly from json."""
    tx = Transaction.from_json(test_json_TransferToken)
    self.assertIsInstance(tx, TransferTokenTransaction)
    self.assertEqual(tx.subtype, qrl_pb2.Transaction.TRANSFERTOKEN)

    # Test that common Transaction components were copied over.
    self.assertEqual(b'Q223bc5e5b78edfd778b1bf72702061cc053010711ffeefb9d969318be5d7b86b021b73c2',
                     tx.txfrom)
    self.assertEqual('3c523f9cc26f800863c003524392806ff6df373acb4d47cc607b62365fe4ab77'
                     'cf3018d321df7dcb653c9f7968673e43d12cc26e3461b5f425fd5d977400fea5',
                     bin2hstr(tx.PK))
    self.assertEqual(b'000000000000000', tx.token_txhash)
    self.assertEqual(200000, tx.amount)
    self.assertEqual('712ec52c483d1e513b83fd4d1210fd943903ae88e0c8048058b06d4e28a8727b',
                     bin2hstr(tx.txhash))
    self.assertEqual(10, tx.ots_key)
    # No signature yet: the txn has not been signed.
    self.assertEqual(b'', tx.signature)
    self.assertEqual('e2e3d8b08e65b25411af455eb9bb402827fa7b600fa0b36011d62e26899dfa05',
                     bin2hstr(tx.pubhash))
    self.assertEqual(1, tx.fee)
def handle_transfer_token_transaction(source, message: qrllegacy_pb2.LegacyMessage):
    """
    Transfer Token Transaction
    This function processes whenever a Transaction having
    subtype TRANSFERTOKEN is received.
    :return:
    """
    P2PBaseObserver._validate_message(message, qrllegacy_pb2.LegacyMessage.TT)
    try:
        tx = Transaction.from_pbdata(message.ttData)
    except Exception as e:
        # Undecodable payload is treated as a hostile/broken peer.
        logger.error('Transfer Token Txn rejected - unable to decode serialised data - closing connection')
        logger.exception(e)
        source.loseConnection()
        return

    # Only act on transactions this peer was actually asked for.
    if not source.factory.master_mr.isRequested(tx.get_message_hash(), source):
        return

    source.factory.add_unprocessed_txn(tx, source.peer_ip)
def DT(self, data):
    """
    Duplicate Transaction
    This function processes whenever a Transaction having
    subtype DT is received.
    :return:
    """
    try:
        duplicate_txn = Transaction.from_json(data)
    except Exception as e:
        # Undecodable payload is treated as a hostile/broken peer.
        logger.error('DT rejected')
        logger.exception(e)
        self.transport.loseConnection()
        return

    # Only act on transactions this peer was actually asked for.
    if not self.factory.master_mr.isRequested(duplicate_txn.get_message_hash(), self):
        return

    # Already known; nothing to do.
    if duplicate_txn.get_message_hash() in self.factory.buffered_chain.tx_pool.duplicate_tx_pool:
        return

    # TODO: State validate for duplicate_txn is pending
    if duplicate_txn.validate():
        self.factory.buffered_chain.tx_pool.add_tx_to_duplicate_pool(duplicate_txn)
    else:
        logger.debug('>>>Invalid DT txn %s', bin2hstr(duplicate_txn.get_message_hash()))
        return

    self.factory.register_and_broadcast('DT', duplicate_txn.get_message_hash(), duplicate_txn.to_json())
def test_from_json(self):
    """An unsigned TransferTransaction deserializes correctly from json."""
    tx = Transaction.from_json(test_json_Simple)
    self.assertIsInstance(tx, TransferTransaction)
    self.assertEqual(tx.subtype, qrl_pb2.Transaction.TRANSFER)

    # Test that common Transaction components were copied over.
    self.assertEqual(0, tx.nonce)
    self.assertEqual(b'Q223bc5e5b78edfd778b1bf72702061cc053010711ffeefb9d969318be5d7b86b021b73c2',
                     tx.txfrom)
    self.assertEqual('3c523f9cc26f800863c003524392806ff6df373acb4d47cc607b62365fe4ab77'
                     'cf3018d321df7dcb653c9f7968673e43d12cc26e3461b5f425fd5d977400fea5',
                     bin2hstr(tx.PK))
    self.assertEqual('986299314d1489f0c23d70b689639c9c0059588563582cb7b21439b61583a5c0',
                     bin2hstr(tx.txhash))
    self.assertEqual(10, tx.ots_key)
    # No signature yet: the txn has not been signed.
    self.assertEqual(b'', tx.signature)
    self.assertEqual('e2e3d8b08e65b25411af455eb9bb402827fa7b600fa0b36011d62e26899dfa05',
                     bin2hstr(tx.pubhash))

    # Test that specific content was copied over.
    self.assertEqual(b'Qfd5d64455903b8e500a14cafb1c4ea95a1f97562aaaa24d83e5b9dc3861a47386ce9ad15',
                     tx.txto)
    self.assertEqual(100, tx.amount)
    self.assertEqual(1, tx.fee)
def __next__(self):
    """Validate the next pending transaction and promote it to the pool.

    Raises StopIteration when nothing is pending; returns False on any
    validation failure, True once the txn is pooled and broadcast.
    """
    tx = self.transaction_pool_obj.get_pending_transaction()
    if not tx:
        raise StopIteration

    if not tx.validate():
        return False

    addr_from_state = self.state.get_address(address=tx.addr_from)
    addr_from_pk_state = addr_from_state
    # Slave transactions validate nonce/OTS against the slave's address state.
    addr_from_pk = Transaction.get_slave(tx)
    if addr_from_pk:
        addr_from_pk_state = self.state.get_address(address=addr_from_pk)

    is_valid_state = tx.validate_extended(addr_from_state=addr_from_state,
                                          addr_from_pk_state=addr_from_pk_state)
    is_valid_pool_state = tx.validate_transaction_pool(self.transaction_pool_obj.transaction_pool)

    if not (is_valid_state and is_valid_pool_state):
        logger.info('>>>TX %s failed state_validate', tx.txhash)
        return False

    logger.info('A TXN has been Processed %s', bin2hstr(tx.txhash))
    self.transaction_pool_obj.add_tx_to_pool(tx)
    self.transaction_pool_obj.append_addr_ots_hash(tx)
    self.broadcast_tx(tx)

    return True
def test_from_json(self):
    """An unsigned StakeTransaction deserializes correctly from json."""
    tx = Transaction.from_json(test_json_Stake)
    self.assertIsInstance(tx, StakeTransaction)

    # Test that common Transaction components were copied over.
    self.assertEqual(0, tx.nonce)
    self.assertEqual(b'Q223bc5e5b78edfd778b1bf72702061cc053010711ffeefb9d969318be5d7b86b021b73c2',
                     tx.txfrom)
    self.assertEqual('3c523f9cc26f800863c003524392806ff6df373acb4d47cc607b62365fe4ab77'
                     'cf3018d321df7dcb653c9f7968673e43d12cc26e3461b5f425fd5d977400fea5',
                     bin2hstr(tx.PK))
    self.assertEqual('06b0fcaf2e2ca69299a8a2ce32f0c05cd14cd61e25a9e324ffad8abc5a88aa7f',
                     bin2hstr(tx.txhash))
    self.assertEqual(10, tx.ots_key)
    # No signature yet: the txn has not been signed.
    self.assertEqual(b'', tx.signature)
    self.assertEqual('e2e3d8b08e65b25411af455eb9bb402827fa7b600fa0b36011d62e26899dfa05',
                     bin2hstr(tx.pubhash))

    # Test that specific content was copied over.
    self.assertEqual(2, tx.activation_blocknumber)
    self.assertEqual('380793debf8f72e70ef7351ee5005df6c7ca2320ff49e0ead0c40b19c7bb1cc1'
                     '496e19a482c06350bdc054e4ed52a24ec8c994c44f9341d01190a81ab093ade8',
                     bin2hstr(tx.slave_public_key))
    self.assertEqual('1f93603db53bfad5c92390f735d0cbb8617b4ab8214ae91c5664a3d1e9b009c8',
                     bin2hstr(tx.hash))
def handle_message_transaction(self, source, message: qrllegacy_pb2.LegacyMessage):
    """
    Message Transaction
    This function processes whenever a Transaction having
    subtype MESSAGE is received.
    :return:
    """
    P2PBaseObserver._validate_message(message, qrllegacy_pb2.LegacyMessage.MT)
    try:
        tx = Transaction.from_pbdata(message.mtData)
    except Exception as e:
        # Undecodable payload is treated as a hostile/broken peer.
        logger.error('Message Txn rejected - unable to decode serialised data - closing connection')
        logger.exception(e)
        source.loseConnection()
        return

    # Only act on transactions this peer was actually asked for.
    if not source.factory.master_mr.isRequested(tx.get_message_hash(), source):
        return

    # Already pending; avoid duplicate processing.
    if tx.txhash in source.factory.buffered_chain.tx_pool.pending_tx_pool_hash:
        return

    source.factory.add_unprocessed_txn(tx, source.peer_ip)
def handle_slave(self, source, message: qrllegacy_pb2.LegacyMessage):
    """
    Receives Lattice Public Key Transaction
    :param message: legacy message carrying the serialized slave txn
    :return:
    """
    P2PBaseObserver._validate_message(message, qrllegacy_pb2.LegacyMessage.SL)
    try:
        tx = Transaction.from_pbdata(message.slData)
    except Exception as e:
        # Undecodable payload is treated as a hostile/broken peer.
        logger.error('slave_txn rejected - unable to decode serialised data - closing connection')
        logger.exception(e)
        source.loseConnection()
        return

    # Only act on transactions this peer was actually asked for.
    if not source.factory.master_mr.isRequested(tx.get_message_hash(), source):
        return

    if not tx.validate():
        # NOTE(review): logs tx.hash — elsewhere in this codebase the
        # attribute is txhash; confirm the attribute name is correct here.
        logger.warning('>>>Slave Txn %s invalid state validation failed..', tx.hash)
        return

    source.factory.add_unprocessed_txn(tx, source.peer_ip)
def validate(self, state) -> bool:
    """Full block validation: PoW nonce, fees, coinbase, header and parentage."""
    if not PoWValidator().validate_mining_nonce(state, self.blockheader):
        logger.warning('Failed PoW Validation')
        return False

    # Fees from all non-coinbase transactions.
    fee_reward = 0
    for index in range(1, len(self.transactions)):
        fee_reward += self.transactions[index].fee

    if len(self.transactions) == 0:
        return False

    try:
        coinbase_txn = Transaction.from_pbdata(self.transactions[0])
        coinbase_amount = coinbase_txn.amount

        if not coinbase_txn.validate_extended():
            return False
    except Exception as e:
        logger.warning('Exception %s', e)
        return False

    if not self.blockheader.validate(fee_reward, coinbase_amount):
        return False

    parent_block = state.get_block(self.prev_headerhash)
    if not self.validate_parent_child_relation(parent_block):
        logger.warning('Failed to validate blocks parent child relation')
        return False

    return True
def test_from_json(self):
    """An unsigned TokenTransaction deserializes correctly from json."""
    tx = Transaction.from_json(test_json_Token)
    self.assertIsInstance(tx, TokenTransaction)
    self.assertEqual(tx.subtype, qrl_pb2.Transaction.TOKEN)

    # Test that common Transaction components were copied over.
    self.assertEqual(b'Q223bc5e5b78edfd778b1bf72702061cc053010711ffeefb9d969318be5d7b86b021b73c2',
                     tx.txfrom)
    self.assertEqual('3c523f9cc26f800863c003524392806ff6df373acb4d47cc607b62365fe4ab77'
                     'cf3018d321df7dcb653c9f7968673e43d12cc26e3461b5f425fd5d977400fea5',
                     bin2hstr(tx.PK))
    self.assertEqual(b'QRL', tx.symbol)
    self.assertEqual(b'Quantum Resistant Ledger', tx.name)
    self.assertEqual(b'Q223bc5e5b78edfd778b1bf72702061cc053010711ffeefb9d969318be5d7b86b021b73c2',
                     tx.owner)
    self.assertEqual('f6d115628207da9c1bffec208aea7fb196d39433062b18f43619106c43cff4e9',
                     bin2hstr(tx.txhash))
    self.assertEqual(10, tx.ots_key)
    # No signature yet: the txn has not been signed.
    self.assertEqual(b'', tx.signature)
    self.assertEqual('e2e3d8b08e65b25411af455eb9bb402827fa7b600fa0b36011d62e26899dfa05',
                     bin2hstr(tx.pubhash))

    # Sum of initial balances must equal the declared total supply.
    total_supply = 0
    for initial_balance in tx.initial_balances:
        total_supply += initial_balance.amount
    self.assertEqual(600000000, total_supply)

    self.assertEqual(1, tx.fee)
def rollback(self, rollback_headerhash, hash_path, latest_block_number):
    """Unwind mainchain back to ``rollback_headerhash`` then replay ``hash_path``.

    ``hash_path`` is ordered child-first; it is iterated in reverse so
    blocks are re-applied oldest-first, in chain order.
    """
    # Phase 1: pop blocks off the mainchain until we reach the fork point.
    while self.last_block.headerhash != rollback_headerhash:
        self.remove_block_from_mainchain(self.last_block, latest_block_number, None)
        self.last_block = self.state.get_block(self.last_block.prev_headerhash)

    # Phase 2: replay the alternate branch.
    for header_hash in hash_path[-1::-1]:
        block = self.state.get_block(header_hash)
        address_set = self.state.prepare_address_list(block)  # Prepare list for current block addresses
        addresses_state = self.state.get_state_mainchain(address_set)

        for tx_idx in range(0, len(block.transactions)):
            tx = Transaction.from_pbdata(block.transactions[tx_idx])
            tx.apply_state_changes(addresses_state)

        self.state.put_addresses_state(addresses_state)
        self.last_block = block
        self._update_mainchain(block, None)
        self.tx_pool.remove_tx_in_block_from_pool(block)
        self.state.update_mainchain_height(block.block_number, None)
        self.state.update_tx_metadata(block, None)

    self.trigger_miner = True
def from_json(json_block):
    """
    Constructor a block from a json string
    :param json_block: a block serialized as a json string
    :return: A block
    """
    tmp_block = Block()
    json_block = json.loads(json_block)
    tmp_block.blockheader = BlockHeader.from_json(json_block['blockheader'])

    # Genesis block (number 0) additionally carries state and stake list.
    if tmp_block.blockheader.blocknumber == 0:
        tmp_block.state = json_block['state']
        tmp_block.stake_list = json_block['stake_list']

    json_transactions = json_block['transactions']
    json_duplicate_transactions = json_block['duplicate_transactions']

    tmp_block.transactions = [Transaction.from_txdict(tx) for tx in json_transactions]
    tmp_block.duplicate_transactions = [DuplicateTransaction().from_txdict(tx)
                                        for tx in json_duplicate_transactions]

    return tmp_block
def apply_state_changes(self, address_txn) -> bool:
    """Validate and apply every transaction of this block onto ``address_txn``.

    The coinbase (index 0) is validated and applied first; remaining txns
    are validated against the progressively mutated state. Returns False
    on the first failure.
    """
    coinbase_tx = Transaction.from_pbdata(self.transactions[0])

    if not coinbase_tx.validate_extended():
        logger.warning('Coinbase transaction failed')
        return False

    coinbase_tx.apply_state_changes(address_txn)

    len_transactions = len(self.transactions)
    for tx_idx in range(1, len_transactions):
        tx = Transaction.from_pbdata(self.transactions[tx_idx])

        # Only one coinbase allowed, and it must be at index 0.
        if isinstance(tx, CoinBase):
            logger.warning('Found another coinbase transaction')
            return False

        if not tx.validate():
            return False

        addr_from_pk_state = address_txn[tx.addr_from]
        # Slave txns check nonce/OTS against the slave's address state.
        addr_from_pk = Transaction.get_slave(tx)
        if addr_from_pk:
            addr_from_pk_state = address_txn[addr_from_pk]

        if not tx.validate_extended(address_txn[tx.addr_from], addr_from_pk_state):
            return False

        expected_nonce = addr_from_pk_state.nonce + 1

        if tx.nonce != expected_nonce:
            logger.warning('nonce incorrect, invalid tx')
            logger.warning('subtype: %s', tx.type)
            logger.warning('%s actual: %s expected: %s', tx.addr_from, tx.nonce, expected_nonce)
            return False

        if addr_from_pk_state.ots_key_reuse(tx.ots_key):
            logger.warning('pubkey reuse detected: invalid tx %s', bin2hstr(tx.txhash))
            logger.warning('subtype: %s', tx.type)
            return False

        tx.apply_state_changes(address_txn)

    return True
def slave_tx_generate(ctx, src, addr_from, number_of_slaves, access_type, fee, pk, otsidx):
    """
    Generates Slave Transaction for the wallet

    Creates up to 100 slave XMSS trees, requests an unsigned slave txn
    from the node, signs it locally and writes slaves.json for the miner.
    """
    try:
        address_src, src_xmss = _select_wallet(ctx, src)
        src_xmss.set_ots_index(otsidx)
        if len(addr_from.strip()) == 0:
            addr_from = address_src
        if src_xmss:
            address_src_pk = src_xmss.pk
        else:
            address_src_pk = pk.encode()

        # convert fee to shor units
        fee_shor = int(fee * 1.e9)
    except Exception as e:
        click.echo("Error validating arguments")
        quit(1)

    slave_xmss = []
    slave_pks = []
    access_types = []
    slave_xmss_seed = []
    if number_of_slaves > 100:
        click.echo("Error: Max Limit for the number of slaves is 100")
        quit(1)

    # Generate one XMSS tree per requested slave.
    for i in range(number_of_slaves):
        print("Generating Slave #" + str(i + 1))
        xmss = XMSS.from_height(config.dev.xmss_tree_height)
        slave_xmss.append(xmss)
        slave_xmss_seed.append(xmss.extended_seed)
        slave_pks.append(xmss.pk)
        access_types.append(access_type)
        print("Successfully Generated Slave %s/%s" % (str(i + 1), number_of_slaves))

    channel = grpc.insecure_channel(ctx.obj.node_public_address)
    stub = qrl_pb2_grpc.PublicAPIStub(channel)
    # FIXME: This could be problematic. Check
    slaveTxnReq = qrl_pb2.SlaveTxnReq(address_from=addr_from,
                                      slave_pks=slave_pks,
                                      access_types=access_types,
                                      fee=fee_shor,
                                      xmss_pk=address_src_pk, )

    try:
        slaveTxnResp = stub.GetSlaveTxn(slaveTxnReq, timeout=5)
        tx = Transaction.from_pbdata(slaveTxnResp.transaction_unsigned)
        tx.sign(src_xmss)
        with open('slaves.json', 'w') as f:
            json.dump([bin2hstr(src_xmss.address), slave_xmss_seed, tx.to_json()], f)
        click.echo('Successfully created slaves.json')
        click.echo('Move slaves.json file from current directory to the mining node inside ~/.qrl/')
    except grpc.RpcError as e:
        click.echo(e.details())
        quit(1)
    except Exception as e:
        click.echo("Unhandled error: {}".format(str(e)))
        quit(1)
def rollback_tx_metadata(self, block, batch):
    """Remove per-transaction metadata written for ``block`` (rollback path)."""
    fee_reward = 0
    for protobuf_txn in block.transactions:
        txn = Transaction.from_pbdata(protobuf_txn)
        fee_reward -= txn.fee
        self.remove_tx_metadata(txn, batch)
        # FIXME: Being updated without batch, need to fix,
        if isinstance(txn, TransferTokenTransaction):
            self.remove_transfer_token_metadata(txn)
        elif isinstance(txn, TokenTransaction):
            self.remove_token_metadata(txn)
        self.decrease_txn_count(self.get_txn_count(txn.addr_from),
                                txn.addr_from)

    txn = Transaction.from_pbdata(block.transactions[0])  # Coinbase Transaction
    # fee_reward is negative here; subtracting the coinbase amount too
    # reverses the supply change made when the block was applied.
    self.update_total_coin_supply(fee_reward - txn.amount)
    self.remove_last_tx(block, batch)
def update_vote_metadata(self, prev_stake_validators_tracker):
    """Accumulate the stake weight of votes referencing this block's parent."""
    self.total_stake_amount = prev_stake_validators_tracker.get_total_stake_amount()
    for vote_protobuf in self.block.vote:
        vote = Transaction.from_pbdata(vote_protobuf)
        # Only votes for our parent headerhash contribute weight.
        if vote.headerhash == self.block.prev_headerhash:
            self.voted_weight += prev_stake_validators_tracker.get_stake_balance(vote.txfrom)
def remove_tx_in_block_from_pool(self, block_obj: Block):
    """Drop every transaction contained in ``block_obj`` from the pool."""
    for pbdata in block_obj.transactions:
        txn = Transaction.from_pbdata(pbdata)
        index = self.get_tx_index_from_pool(txn.txhash)
        if index < 0:
            continue
        del self.transaction_pool[index]
        # Restore the heap invariant after the arbitrary-position delete.
        heapq.heapify(self.transaction_pool)
def GetObject(self, request: qrl_pb2.GetObjectReq, context) -> qrl_pb2.GetObjectResp:
    """Resolve ``request.query`` as an address, a txhash, or a block hash/index."""
    logger.debug("[PublicAPI] GetObject")
    answer = qrl_pb2.GetObjectResp()
    answer.found = False

    # FIXME: We need a unified way to access and validate data.
    query = bytes(request.query)  # query will be as a string, if Q is detected convert, etc.

    # 1) Address lookup.
    if AddressState.address_is_valid(query):
        if self.qrlnode.get_address_is_used(query):
            address_state = self.qrlnode.get_address_state(query)
            if address_state is not None:
                answer.found = True
                answer.address_state.CopyFrom(address_state.pbdata)
                return answer

    # 2) Transaction lookup.
    transaction, block_number = self.qrlnode.get_transaction(query)
    if transaction is not None:
        answer.found = True
        blockheader = None
        if block_number is not None:
            block = self.qrlnode.get_block_from_index(block_number)
            blockheader = block.blockheader.pbdata

        txextended = qrl_pb2.TransactionExtended(header=blockheader,
                                                 tx=transaction.pbdata,
                                                 addr_from=transaction.addr_from,
                                                 size=transaction.size)
        answer.transaction.CopyFrom(txextended)
        return answer

    # 3) Block lookup: by hash first, then by numeric index.
    # NOTE: This is temporary, indexes are accepted for blocks
    try:
        block = self.qrlnode.get_block_from_hash(query)
        if block is None:
            query_str = query.decode()
            query_index = int(query_str)
            block = self.qrlnode.get_block_from_index(query_index)

        answer.found = True
        block_extended = qrl_pb2.BlockExtended()
        block_extended.header.CopyFrom(block.blockheader.pbdata)
        block_extended.size = block.size
        for transaction in block.transactions:
            tx = Transaction.from_pbdata(transaction)
            extended_tx = qrl_pb2.TransactionExtended(tx=transaction,
                                                      addr_from=tx.addr_from,
                                                      size=tx.size)
            block_extended.extended_transactions.extend([extended_tx])
        answer.block_extended.CopyFrom(block_extended)
        return answer
    except Exception:
        # Any failure here simply means "not found".
        pass

    return answer
def get_tx_metadata(self, txhash: bytes):
    """Return (Transaction, block_number) for ``txhash``, or None if absent/unreadable."""
    try:
        record = self._db.get(bin2hstr(txhash))
    except Exception:
        return None

    if record is None:
        return None

    txn_json, block_number, _timestamp = record
    return Transaction.from_json(txn_json), block_number
def create_block(self, last_block, mining_nonce, tx_pool: TransactionPool, miner_address) -> Optional[Block]:
    """Assemble a candidate block on top of ``last_block`` from the tx pool.

    Transactions that fail extended validation are removed from the
    pool; transactions that would exceed the block size limit are
    skipped for a later block.
    """
    # TODO: Persistence will move to rocksdb
    # FIXME: Difference between this and create block?????????????
    # Dummy block exists only to measure an empty block's serialized size.
    dummy_block = Block.create(block_number=last_block.block_number + 1,
                               prevblock_headerhash=last_block.headerhash,
                               transactions=[],
                               miner_address=miner_address)
    dummy_block.set_nonces(mining_nonce, 0)

    t_pool2 = tx_pool.transactions

    addresses_set = set()
    for tx_set in t_pool2:
        tx = tx_set[1]  # tx_set[1] holds the transaction; confirm tuple layout against TransactionPool
        tx.set_effected_address(addresses_set)

    addresses_state = dict()
    for address in addresses_set:
        addresses_state[address] = self.state.get_address(address)

    block_size = dummy_block.size
    block_size_limit = self.state.get_block_size_limit(last_block)

    transactions = []
    for tx_set in t_pool2:
        tx = tx_set[1]
        # Skip Transactions for later, which doesn't fit into block
        if block_size + tx.size + config.dev.tx_extra_overhead > block_size_limit:
            continue

        addr_from_pk_state = addresses_state[tx.addr_from]
        # Slave txns validate against the slave's address state.
        addr_from_pk = Transaction.get_slave(tx)
        if addr_from_pk:
            addr_from_pk_state = addresses_state[addr_from_pk]

        if not tx.validate_extended(addresses_state[tx.addr_from], addr_from_pk_state):
            logger.warning('Txn validation failed for tx in tx_pool')
            tx_pool.remove_tx_from_pool(tx)
            continue

        tx.apply_on_state(addresses_state)

        # Stamp the nonce from the (already updated) address state.
        tx._data.nonce = addr_from_pk_state.nonce
        block_size += tx.size + config.dev.tx_extra_overhead
        transactions.append(tx)

    block = Block.create(block_number=last_block.block_number + 1,
                         prevblock_headerhash=last_block.headerhash,
                         transactions=transactions,
                         miner_address=miner_address)

    return block
def get_tx_metadata(db, state_code, txhash: bytes):
    """Return (Transaction, block_number) for ``txhash`` under ``state_code``.

    Returns None when the record is missing or the DB read fails.
    """
    key = str(state_code) + bin2hstr(txhash)
    try:
        record = db.get(key)
    except Exception:
        return None

    if record is None:
        return None

    txn_json, block_number, _timestamp = record
    return Transaction.from_json(txn_json), block_number
def _parse_tx_object(source, message: qrllegacy_pb2.LegacyMessage, kind):
    """Decode the transaction payload selected by ``kind`` from a legacy message.

    Bug fix: the payload field was hard-coded to ``message.mtData`` so the
    ``kind`` argument was silently ignored; the field named by ``kind`` is
    now read instead.

    :param source: peer connection; closed on decode failure
    :param kind: name of the LegacyMessage payload field to decode
    :return: the decoded Transaction, or None on failure
    """
    tx = None
    try:
        tx = Transaction.from_pbdata(getattr(message, kind))
    except Exception as e:
        logger.error('Message Txn rejected - unable to decode serialised data - closing connection')
        logger.exception(e)
        source.loseConnection()
    return tx
def PushTransaction(self, request: qrl_pb2.PushTransactionReq, context) -> qrl_pb2.PushTransactionResp:
    """Submit a signed transaction to the node and report the outcome."""
    logger.debug("[PublicAPI] PushTransaction")

    signed_tx = Transaction.from_pbdata(request.transaction_signed)
    was_submitted = self.qrlnode.submit_send_tx(signed_tx)

    # FIXME: Improve response type
    # Prepare response
    response = qrl_pb2.PushTransactionResp()
    response.some_response = str(was_submitted)
    return response
def valid_mining_permission(self):
    """Check whether the mining XMSS key may mine for the master address.

    Returns True when authorized (or when mining for our own address).
    Returns None — not False — after re-queueing the slave registration
    txn when the slave key is not yet registered on-chain.
    """
    if self._master_address == self._mining_xmss.get_address():
        return True
    addr_state = self.state.get_address(self._master_address)
    access_type = addr_state.get_slave_permission(self._mining_xmss.pk())
    if access_type == -1:
        logger.warning('Slave is not authorized yet for mining')
        logger.warning('Added Slave Txn')
        # Re-submit our slave registration txn (stored as json at index 2).
        slave_tx = Transaction.from_json(self._slaves[2])
        self._add_unprocessed_txn_fn(slave_tx, None)
        return None
    return True
def prepare_address_list(block) -> set:
    """Collect every address touched by ``block`` plus all genesis balance addresses."""
    affected = set()

    for proto_tx in block.transactions:
        Transaction.from_pbdata(proto_tx).set_effected_address(affected)

    # set.add is idempotent, so no membership pre-check is needed.
    for genesis_balance in GenesisBlock().genesis_balance:
        affected.add(genesis_balance.address.encode())

    return affected
def get_state(self, header_hash, addresses_set):
    """Compute address states as of ``header_hash``.

    Walks back from ``header_hash`` until a cached state snapshot is
    found, seeds missing entries from genesis balances / defaults, then
    replays the intermediate blocks forward onto the state.
    """
    tmp_header_hash = header_hash
    parent_headerhash = None

    addresses_state = dict()
    for address in addresses_set:
        addresses_state[address] = None

    # Phase 1: walk back to the nearest cached snapshot and load from it.
    while True:
        if self.state_objects.contains(header_hash):
            parent_headerhash = header_hash
            self.set_addresses_state(addresses_state, header_hash)
            break
        block = self.get_block(header_hash)
        if not block:
            logger.warning('[get_state] No Block Found %s', header_hash)
            break
        if block.block_number == 0:
            break
        header_hash = block.prev_headerhash

    # Seed still-missing addresses from genesis balances, then defaults.
    for genesis_balance in GenesisBlock().genesis_balance:
        bytes_addr = genesis_balance.address.encode()
        if not addresses_state[bytes_addr]:
            addresses_state[bytes_addr] = AddressState.get_default(bytes_addr)
            addresses_state[bytes_addr]._data.balance = genesis_balance.balance

    for address in addresses_state:
        if not addresses_state[address]:
            addresses_state[address] = AddressState.get_default(address)

    # Phase 2: collect the hashes between the snapshot and the target.
    header_hash = tmp_header_hash
    hash_path = []
    while True:
        if parent_headerhash == header_hash:
            break
        block = self.get_block(header_hash)
        if not block:
            break
        hash_path.append(header_hash)
        header_hash = block.prev_headerhash
        if block.block_number == 0:
            break

    # Phase 3: replay those blocks oldest-first onto the state.
    for header_hash in hash_path[-1::-1]:
        block = self.get_block(header_hash)

        for tx_pbdata in block.transactions:
            tx = Transaction.from_pbdata(tx_pbdata)
            tx.apply_on_state(addresses_state)

    return addresses_state
def get_last_txs(self):
    """Return the transactions recorded under the 'last_txn' DB key.

    Best-effort: returns an empty list when the key is missing or the
    read fails.
    """
    try:
        last_txn = self._db.get('last_txn')
    except Exception:  # noqa - narrowed from bare except; still best-effort
        return []

    txs = []
    for tx_metadata in last_txn:
        # metadata layout: [tx_json, block_number, block_timestamp]
        tx_json, _block_num, _block_ts = tx_metadata
        txs.append(Transaction.from_json(tx_json))

    return txs
def tx_sign(ctx, src, txblob):
    """ Sign a tx blob """
    raw_tx = bytes(hstr2bin(txblob))
    pbdata = qrl_pb2.Transaction()
    pbdata.ParseFromString(raw_tx)
    tx = Transaction.from_pbdata(pbdata)

    _address_src, signing_xmss = _select_wallet(ctx, src)
    tx.sign(signing_xmss)

    # Emit the signed transaction as a hex blob.
    print(bin2hstr(tx.pbdata.SerializeToString()))
def validate(self) -> bool:
    """Validate this block's header against its fee total and coinbase amount."""
    if len(self.transactions) == 0:
        return False

    # Fees from every non-coinbase transaction.
    fee_reward = 0
    for index in range(1, len(self.transactions)):
        fee_reward += self.transactions[index].fee

    try:
        coinbase_txn = Transaction.from_pbdata(self.transactions[0])
        coinbase_amount = coinbase_txn.amount
    except Exception as e:
        logger.warning('Exception %s', e)
        return False

    return self.blockheader.validate(fee_reward, coinbase_amount)
def tx_inspect(ctx, txblob):
    """ Inspected a transaction blob """
    tx = None
    try:
        # Hex blob -> bytes -> protobuf -> Transaction wrapper.
        pbdata = qrl_pb2.Transaction()
        pbdata.ParseFromString(bytes(hstr2bin(txblob)))
        tx = Transaction.from_pbdata(pbdata)
    except Exception:
        click.echo("tx blob is not valid")
        quit(1)
    # FIXME: binary fields are represented in base64. Improve output
    print(tx.to_json())
def tx_transfer(ctx, src, dst, amount, fee):
    """ Transfer coins from src to dst """
    # Only supported against a remote node; a local wallet alone cannot push.
    if not ctx.obj.remote:
        click.echo('This command is unsupported for local wallets')
        return
    try:
        address_src, src_xmss = _select_wallet(ctx, src)
        if not src_xmss:
            click.echo("A local wallet is required to sign the transaction")
            quit(1)
        src_pk = src_xmss.pk()
        src_otsidx = src_xmss.get_index()
        address_dst = dst.encode()
        # FIXME: This could be problematic. Check
        amount_shor = int(amount * 1.e9)
        fee_shor = int(fee * 1.e9)
    except Exception:
        click.echo("Error validating arguments")
        quit(1)
    try:
        # Ask the node to build the unsigned transfer, sign it locally,
        # then push the signed transaction back.
        channel = grpc.insecure_channel(ctx.obj.node_public_address)
        stub = qrl_pb2_grpc.PublicAPIStub(channel)
        transfer_req = qrl_pb2.TransferCoinsReq(address_from=address_src,
                                                address_to=address_dst,
                                                amount=amount_shor,
                                                fee=fee_shor,
                                                xmss_pk=src_pk,
                                                xmss_ots_index=src_otsidx)
        transfer_resp = stub.TransferCoins(transfer_req, timeout=5)
        tx = Transaction.from_pbdata(transfer_resp.transaction_unsigned)
        tx.sign(src_xmss)
        push_req = qrl_pb2.PushTransactionReq(transaction_signed=tx.pbdata)
        push_resp = stub.PushTransaction(push_req, timeout=5)
        print(push_resp.some_response)
    except Exception as e:
        print("Error {}".format(str(e)))
def update_tx_metadata(self, block, batch):
    """Persist per-txhash metadata for every transaction in a block and
    refresh the recent-transactions list."""
    if not block.transactions:
        return
    # TODO (cyyber): Move To State Cache, instead of writing directly
    for protobuf_txn in block.transactions:
        txn = Transaction.from_pbdata(protobuf_txn)
        # Store [json, block number, timestamp] keyed by hex txhash.
        self._db.put(bin2hstr(txn.txhash),
                     [txn.to_json(), block.block_number, block.timestamp],
                     batch)
        # FIXME: Being updated without batch, need to fix,
        if txn.subtype == qrl_pb2.Transaction.TRANSFERTOKEN:
            self.update_token_metadata(txn)
        elif txn.subtype == qrl_pb2.Transaction.TOKEN:
            self.create_token_metadata(txn)
        self.increase_txn_count(txn.txfrom)
    self.update_last_tx(block, batch)
def update_last_tx(self, block, batch):
    """Prepend this block's non-coinbase transactions to the rolling
    'last_txn' list, keeping at most 20 entries."""
    if not block.transactions:
        return
    try:
        recent = self._db.get('last_txn')
    except:  # noqa
        # No previous history: start fresh.
        recent = []
    # Only consider the last 20 transactions of the block.
    for protobuf_txn in block.transactions[-20:]:
        txn = Transaction.from_pbdata(protobuf_txn)
        if txn.subtype == qrl_pb2.Transaction.COINBASE:
            continue
        recent.insert(0, [txn.to_json(), block.block_number, block.timestamp])
    # Trim the list back down to 20 entries.
    del recent[20:]
    self._db.put('last_txn', recent, batch)
def tx_push(ctx, txblob):
    """Decode a signed tx blob, show it, and push it to the remote node."""
    tx = None
    try:
        pbdata = qrl_pb2.Transaction()
        pbdata.ParseFromString(bytes(hstr2bin(txblob)))
        tx = Transaction.from_pbdata(pbdata)
    except Exception:
        click.echo("tx blob is not valid")
        quit(1)
    # FIXME: binary fields are represented in base64. Improve output
    print(tx.to_json())
    # Refuse to push an unsigned transaction.
    if not tx.signature:
        click.echo('Signature missing')
        quit(1)
    channel = grpc.insecure_channel(ctx.obj.node_public_address)
    stub = qrl_pb2_grpc.PublicAPIStub(channel)
    push_req = qrl_pb2.PushTransactionReq(transaction_signed=tx.pbdata)
    push_resp = stub.PushTransaction(push_req, timeout=5)
    print(push_resp.some_response)
def create_block(self, last_block, mining_nonce, tx_pool, signing_xmss, master_address) -> Optional[Block]:
    """Assemble a candidate block from the transaction pool.

    Validates each pooled transaction against a working copy of address
    state, dropping invalid ones and deferring those that do not fit in
    the block size limit.

    Bug fix: the MESSAGE insufficient-balance branch decremented total_txn
    and continued WITHOUT `del t_pool2[txnum]`, so the same invalid tx was
    re-tested until total_txn drained below txnum — skipping all remaining
    transactions and still leaving the invalid tx in the block. It now
    removes the tx like every sibling branch.

    :param last_block: block to build on top of.
    :param mining_nonce: nonce to embed in the header.
    :param tx_pool: pool object; its transaction_pool is consumed/refilled.
    :param signing_xmss: XMSS tree used to sign the block.
    :param master_address: master address for the coinbase.
    :return: the assembled Block.
    """
    # TODO: Persistence will move to rocksdb
    # FIXME: Difference between this and create block?????????????
    # FIXME: Break encapsulation
    # Dummy block is only used to measure the empty-block size.
    dummy_block = Block.create(mining_nonce=mining_nonce,
                               block_number=last_block.block_number + 1,
                               prevblock_headerhash=last_block.headerhash,
                               transactions=[],
                               signing_xmss=signing_xmss,
                               master_address=master_address,
                               nonce=0)
    dummy_block.set_mining_nonce(mining_nonce)
    # Signing the dummy block consumed an OTS index; rewind so the real
    # block signature reuses it.
    signing_xmss.set_index(signing_xmss.get_index() - 1)
    t_pool2 = copy.deepcopy(tx_pool.transaction_pool)
    del tx_pool.transaction_pool[:]
    ######
    # recreate the transaction pool as in the tx_hash_list, ordered by txhash..
    total_txn = len(t_pool2)
    txnum = 0
    addresses_set = set()
    # Collect every address any pooled tx touches.
    while txnum < total_txn:
        tx = t_pool2[txnum]
        tx.set_effected_address(addresses_set)
        txnum += 1
    # Working copy of on-chain state for validation/mutation below.
    addresses_state = dict()
    for address in addresses_set:
        addresses_state[address] = self.state.get_address(address)
    block_size = dummy_block.size
    block_size_limit = self.state.get_block_size_limit(last_block)
    txnum = 0
    while txnum < total_txn:
        tx = t_pool2[txnum]
        # Skip Transactions for later, which doesn't fit into block
        if block_size + tx.size + config.dev.tx_extra_overhead > block_size_limit:
            txnum += 1
            continue
        # OTS reuse is checked against the slave's state when the tx is
        # signed by a slave key.
        addr_from_pk_state = addresses_state[tx.txfrom]
        addr_from_pk = Transaction.get_slave(tx)
        if addr_from_pk:
            addr_from_pk_state = addresses_state[addr_from_pk]
        if tx.ots_key_reuse(addr_from_pk_state, tx.ots_key):
            del t_pool2[txnum]
            total_txn -= 1
            continue
        if tx.subtype == qrl_pb2.Transaction.TRANSFER:
            if addresses_state[tx.txfrom].balance < tx.amount + tx.fee:
                logger.warning('%s %s exceeds balance, invalid tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.txfrom].balance, tx.amount)
                del t_pool2[txnum]
                total_txn -= 1
                continue
        if tx.subtype == qrl_pb2.Transaction.MESSAGE:
            if addresses_state[tx.txfrom].balance < tx.fee:
                logger.warning('%s %s exceeds balance, invalid message tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Free %s',
                               addresses_state[tx.txfrom].balance, tx.fee)
                # BUG FIX: previously missing — without this del the same
                # invalid tx was re-tested and ended up in the block.
                del t_pool2[txnum]
                total_txn -= 1
                continue
        if tx.subtype == qrl_pb2.Transaction.TOKEN:
            if addresses_state[tx.txfrom].balance < tx.fee:
                logger.warning('%s %s exceeds balance, invalid tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Fee %s',
                               addresses_state[tx.txfrom].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue
        if tx.subtype == qrl_pb2.Transaction.TRANSFERTOKEN:
            if addresses_state[tx.txfrom].balance < tx.fee:
                logger.warning('%s %s exceeds balance, invalid tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.txfrom].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue
            # Sender must own the token and hold a sufficient balance of it.
            if bin2hstr(tx.token_txhash).encode() not in addresses_state[tx.txfrom].tokens:
                logger.warning('%s doesnt own any token with token_txnhash %s',
                               tx.txfrom, bin2hstr(tx.token_txhash).encode())
                del t_pool2[txnum]
                total_txn -= 1
                continue
            if addresses_state[tx.txfrom].tokens[bin2hstr(tx.token_txhash).encode()] < tx.amount:
                logger.warning('Token Transfer amount exceeds available token')
                logger.warning('Token Txhash %s', bin2hstr(tx.token_txhash).encode())
                logger.warning('Available Token Amount %s',
                               addresses_state[tx.txfrom].tokens[bin2hstr(tx.token_txhash).encode()])
                logger.warning('Transaction Amount %s', tx.amount)
                del t_pool2[txnum]
                total_txn -= 1
                continue
        if tx.subtype == qrl_pb2.Transaction.LATTICE:
            if addresses_state[tx.txfrom].balance < tx.fee:
                logger.warning('Lattice TXN %s %s exceeds balance, invalid tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.txfrom].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue
        if tx.subtype == qrl_pb2.Transaction.SLAVE:
            if addresses_state[tx.txfrom].balance < tx.fee:
                logger.warning('Slave TXN %s %s exceeds balance, invalid tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.txfrom].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue
        # Transaction accepted: apply it to the working state, return it to
        # the live pool, and stamp its nonce from the updated sender state.
        tx.apply_on_state(addresses_state)
        tx_pool.add_tx_to_pool(tx)
        tx._data.nonce = addresses_state[tx.txfrom].nonce
        txnum += 1
        block_size += tx.size + config.dev.tx_extra_overhead
    # Coinbase nonce: prefer the in-flight state if the miner address was
    # touched by one of the accepted transactions.
    coinbase_nonce = self.state.get_address(signing_xmss.get_address()).nonce
    if signing_xmss.get_address() in addresses_state:
        coinbase_nonce = addresses_state[signing_xmss.get_address()].nonce + 1
    block = Block.create(mining_nonce=mining_nonce,
                         block_number=last_block.block_number + 1,
                         prevblock_headerhash=last_block.headerhash,
                         transactions=t_pool2,
                         signing_xmss=signing_xmss,
                         master_address=master_address,
                         nonce=coinbase_nonce)
    return block
def set_unused_ots_key(self, xmss, addr_state, start=0):
    """Advance xmss to the first OTS index at or after `start` that the
    address has not yet used.

    :return: True if an unused index was found and set, else False.
    """
    candidate = start
    limit = 2 ** xmss.height
    while candidate < limit:
        if not Transaction.ots_key_reuse(addr_state, candidate):
            xmss.set_index(candidate)
            return True
        candidate += 1
    # Every one-time signature key has been consumed.
    return False
def remove_tx_in_block_from_pool(self, block_obj: Block):
    """Remove from the transaction pool every transaction that appears
    in `block_obj`.

    Bug fix: the original iterated self.transaction_pool directly while
    remove_tx_from_pool mutated it, which can skip pool entries during
    iteration. We now iterate over a snapshot and match by txhash.
    """
    # Precompute the block's txhashes once instead of an O(n*m) rescan.
    block_tx_hashes = {Transaction.from_pbdata(protobuf_tx).txhash
                       for protobuf_tx in block_obj.transactions}
    # Snapshot the pool: remove_tx_from_pool mutates the underlying list.
    for txn in list(self.transaction_pool):
        if txn.txhash in block_tx_hashes:
            self.remove_tx_from_pool(txn)
def validate_block(self, block, address_txn) -> bool:
    """Fully validate a block's transactions against the given address state.

    Fix: the original called `coinbase_tx.validate()` twice — the first
    call's result was silently discarded. The dead call is removed, and the
    cheap coinbase subtype/signature checks now run before the (expensive)
    mining-nonce check; all checks are conjunctive so the result is
    unchanged.

    :param block: block to validate.
    :param address_txn: mutable dict of address -> AddressState; updated
                        in place as transactions are applied.
    :return: True if every transaction (including the coinbase) validates.
    """
    len_transactions = len(block.transactions)
    if len_transactions < 1:
        return False
    coinbase_tx = Transaction.from_pbdata(block.transactions[0])
    # First transaction must be the coinbase, and it must validate.
    if coinbase_tx.subtype != qrl_pb2.Transaction.COINBASE:
        return False
    if not coinbase_tx.validate():
        return False
    if not self.validate_mining_nonce(block):
        return False
    coinbase_tx.apply_on_state(address_txn)
    # Extended validation runs against the slave's state when applicable.
    addr_from_pk_state = address_txn[coinbase_tx.txto]
    addr_from_pk = Transaction.get_slave(coinbase_tx)
    if addr_from_pk:
        addr_from_pk_state = address_txn[addr_from_pk]
    if not coinbase_tx.validate_extended(address_txn[coinbase_tx.txto], addr_from_pk_state, []):
        return False
    # TODO: check block reward must be equal to coinbase amount
    for tx_idx in range(1, len_transactions):
        tx = Transaction.from_pbdata(block.transactions[tx_idx])
        # Only transaction 0 may be a coinbase.
        if tx.subtype == qrl_pb2.Transaction.COINBASE:
            return False
        if not tx.validate():  # TODO: Move this validation, before adding txn to pool
            return False
        addr_from_pk_state = address_txn[tx.txfrom]
        addr_from_pk = Transaction.get_slave(tx)
        if addr_from_pk:
            addr_from_pk_state = address_txn[addr_from_pk]
        if not tx.validate_extended(address_txn[tx.txfrom], addr_from_pk_state, []):
            return False
        # Nonces must be strictly sequential per sender.
        expected_nonce = address_txn[tx.txfrom].nonce + 1
        if tx.nonce != expected_nonce:
            logger.warning('nonce incorrect, invalid tx')
            logger.warning('subtype: %s', tx.subtype)
            logger.warning('%s actual: %s expected: %s', tx.txfrom, tx.nonce, expected_nonce)
            return False
        # One-time-signature keys may never be reused.
        if tx.ots_key_reuse(address_txn[tx.txfrom], tx.ots_key):
            logger.warning('pubkey reuse detected: invalid tx %s', tx.txhash)
            logger.warning('subtype: %s', tx.subtype)
            return False
        tx.apply_on_state(address_txn)
    return True
def slave_tx_generate(ctx, src, addr_from, number_of_slaves, access_type, fee, pk, otsidx):
    """ Generates Slave Transaction for the wallet """
    try:
        # Resolve the source wallet; fall back to the provided pk/otsidx
        # when no local XMSS tree is available.
        address_src, src_xmss = _select_wallet(ctx, src)
        if len(addr_from.strip()) == 0:
            addr_from = address_src
        if src_xmss:
            address_src_pk = src_xmss.pk()
            address_src_otsidx = src_xmss.get_index()
        else:
            address_src_pk = pk.encode()
            address_src_otsidx = int(otsidx)
        # Convert fee from coins to shor (1e9 shor per coin).
        fee_shor = int(fee * 1.e9)
    except Exception as e:
        click.echo("Error validating arguments")
        quit(1)
    slave_xmss = []
    slave_pks = []
    access_types = []
    slave_xmss_seed = []
    if number_of_slaves > 100:
        click.echo("Error: Max Limit for the number of slaves is 100")
        quit(1)
    # Generate a fresh XMSS tree per slave; seeds are kept so the mining
    # node can later reconstruct the trees from slaves.json.
    for i in range(number_of_slaves):
        print("Generating Slave #"+str(i+1))
        xmss = XMSS(config.dev.xmss_tree_height)
        slave_xmss.append(xmss)
        slave_xmss_seed.append(xmss.get_seed())
        slave_pks.append(xmss.pk())
        access_types.append(access_type)
        print("Successfully Generated Slave %s/%s" % (str(i + 1), number_of_slaves))
    channel = grpc.insecure_channel(ctx.obj.node_public_address)
    stub = qrl_pb2_grpc.PublicAPIStub(channel)
    # FIXME: This could be problematic. Check
    slaveTxnReq = qrl_pb2.SlaveTxnReq(address_from=addr_from,
                                      slave_pks=slave_pks,
                                      access_types=access_types,
                                      fee=fee_shor,
                                      xmss_pk=address_src_pk,
                                      xmss_ots_index=address_src_otsidx)
    try:
        slaveTxnResp = stub.GetSlaveTxn(slaveTxnReq, timeout=5)
        tx = Transaction.from_pbdata(slaveTxnResp.transaction_unsigned)
        # NOTE(review): if the pk/otsidx path was taken above, src_xmss is
        # falsy here and tx.sign(src_xmss) / src_xmss.get_address() will
        # fail — confirm whether that path is actually reachable.
        tx.sign(src_xmss)
        with open('slaves.json', 'w') as f:
            json.dump([src_xmss.get_address(), slave_xmss_seed, tx.to_json()], f)
        click.echo('Successfully created slaves.json')
        click.echo('Move slaves.json file from current directory to the mining node inside ~/.qrl/')
    except grpc.RpcError as e:
        click.echo(e.details())
        quit(1)
    except Exception as e:
        click.echo("Unhandled error: {}".format(str(e)))
        quit(1)