def _prune(self, childs, batch):
    """Delete a sub-tree of blocks from state, breadth-first.

    Starting from the header hashes in ``childs``, removes each block's raw
    entry and its ``metadata_`` record (via ``batch``), then enqueues that
    block's own children so the whole descendant tree is pruned.
    """
    while childs:
        hh = childs.pop(0)
        meta = self.state.get_block_metadata(hh)
        childs += meta.child_headerhashes
        # Both keys are derived from the same hex-encoded header hash.
        key = bin2hstr(hh).encode()
        self.state.delete(key, batch)
        self.state.delete(b'metadata_' + key, batch)
def get_block(self, header_hash: bytes) -> Optional[Block]:
    """Fetch a block by header hash from the raw DB.

    :param header_hash: binary header hash of the block
    :return: the Block, or None when missing or on any read/decode error
    """
    hex_hash = bin2hstr(header_hash)
    try:
        json_data = self._db.get_raw(hex_hash.encode())
        return Block.from_json(json_data)
    except KeyError:
        # FIX: log the readable hex string; the original passed
        # bin2hstr(...).encode(), printing b'...' byte-literal noise.
        logger.debug('[get_block] Block header_hash %s not found', hex_hash)
    except Exception as e:
        logger.error('[get_block] %s', e)
    return None
def test_hashchain_reveal(self):
    """hashchain_reveal must be deterministic for a known seed and epoch."""
    seed = sha256(b'test_seed')
    expected = [
        (100, '1d3b37dedc74980941b3b65640e8d2851658feac0d38196f372ada9c2ac0b077'),
        (0, '127f5db0388cd82bd4af80b88e8d68409e1f70fd322f96b3f2aca55b0ade116f'),
        (50, 'ff7f4850bc6499e08e104c6967ee66e665e57d7e0e429072e646d14e1b92600a'),
    ]
    for reveal_index, digest in expected:
        self.assertEqual(digest, bin2hstr(hashchain_reveal(seed, 1, reveal_index)))
    # A neighbouring index must produce a different digest
    self.assertNotEqual('ff7f4850bc6499e08e104c6967ee66e665e57d7e0e429072e646d14e1b92600a',
                        bin2hstr(hashchain_reveal(seed, 1, 51)))
def test_hashchain_verify(self):
    """Every link i must hash forward (SIZE - i times) to the terminator."""
    seed = sha256(b'test_seed')
    HASHCHAIN_SIZE = 100
    hcb = hashchain(seed, 1, HASHCHAIN_SIZE)
    self.assertIsNotNone(hcb)
    self.assertEqual(HASHCHAIN_SIZE + 1, len(hcb.hashchain))
    for i, link in enumerate(hcb.hashchain):
        forwarded = sha256_n(link, HASHCHAIN_SIZE - i)
        logger.info("{:-4} {} {}".format(i, bin2hstr(link), bin2hstr(forwarded)))
        self.assertEqual(hcb.hc_terminator, forwarded)
def _validate_custom(self):
    """Validate TokenTransaction-specific fields.

    :return: True when valid
    :raises ValueError: on non-positive fee or any non-positive initial balance
    """
    if self.fee <= 0:
        # FIX: format the message eagerly; ValueError does not interpolate
        # logging-style (msg, args) tuples, so str(e) was a raw tuple repr.
        raise ValueError('TokenTransaction [%s] Invalid Fee = %d' %
                         (bin2hstr(self.txhash), self.fee))
    for initial_balance in self._data.token.initial_balances:
        if initial_balance.amount <= 0:
            raise ValueError('TokenTransaction [%s] Invalid Amount = %s for address %s' %
                             (bin2hstr(self.txhash),
                              initial_balance.amount,
                              initial_balance.address))
    return True
def destroy_fork_states(self, block_number, headerhash):
    """
    Removes all the cache state, which are created further, the current
    blocknumber. Usually done when a new branch found as main branch.
    :param block_number: height of the new main-chain tip
    :param headerhash: header hash expected at block_number on the main chain
    :return:
    """
    str_headerhash = bin2hstr(headerhash).encode()
    # Manual index loop: destroy_state_loader() mutates self._state_loaders,
    # so the live length is tracked by hand and `index` only advances when
    # the current entry is kept.
    len_state_loaders = len(self._state_loaders)
    index = 0
    while index < len_state_loaders:
        state_loader = self._state_loaders[index]
        logger.debug('Comparing #%s>%s', state_loader.block_number, block_number)
        if state_loader.block_number > block_number:
            # Loader is above the new tip: always discard.
            logger.debug('Destroyed State #%s', state_loader.block_number)
            self.destroy_state_loader(index)
            len_state_loaders -= 1
            continue
        if state_loader.block_number == block_number:
            # Same height but different header hash -> stale fork state.
            if state_loader.state_code != str_headerhash:
                self.destroy_state_loader(index)
                len_state_loaders -= 1
                continue
        index += 1
def set_addresses_state(self, addresses_state: dict, state_code: bytes):
    """
    Sets the addresses_state from the latest state objects cache from or
    after state_code.
    :param addresses_state: dict of address -> address state, filled in place
    :param state_code: binary header hash identifying the state loader to
                       start the (reverse) search from
    :return:
    """
    str_state_code = bin2hstr(state_code).encode()
    # Locate the loader whose state_code matches; `index` ends at its position.
    index = -1
    found = False
    for state_object in self.state_objects.state_loaders:
        index += 1
        if state_object.state_code == str_state_code:
            found = True
            break
    if not found:
        logger.warning('Not Possible: State Code not found %s', str_state_code)
        raise Exception
    for address in addresses_state:
        # Walk backwards from the matched loader towards the oldest one,
        # taking the first loader that knows this address.
        for state_obj_index in range(index, -1, -1):
            state_object = self.state_objects.get_state_loader_by_index(state_obj_index)
            addresses_state[address] = state_object.get_address(address)
            if addresses_state[address]:
                break
        # No cached loader had it: fall back to the persistent state.
        if not addresses_state[address]:
            addresses_state[address] = self._get_address_state(address)
def score(stake_address: bytes, reveal_one: bytes, balance: int = 0, seed: bytes = None, verbose: bool = False):
    """Compute the stake-validator score for a reveal.

    Lower scores win. Combines the reveal with the epoch seed via sha256 and
    weights the result by the staker's balance. Returns None for a zero
    balance; raises when no seed is supplied.
    """
    if not seed:
        logger.info('Exception Raised due to seed none in score fn')
        raise Exception
    if not balance:
        logger.info(' balance 0 so score none ')
        logger.info(' stake_address %s', stake_address)
        return None
    # FIXME: Review this
    reveal_seed = bin2hstr(sha256(str(reveal_one).encode() + str(seed).encode()))
    log2_of_reveal = Decimal(int(reveal_seed, 16)).log10() / Decimal(2).log10()
    score = (Decimal(config.dev.N) - log2_of_reveal) / Decimal(balance)
    if verbose:
        logger.info('=' * 10)
        logger.info('Score - %s', score)
        logger.info('reveal_one - %s', reveal_one)
        logger.info('seed - %s', seed)
        logger.info('balance - %s', balance)
    return score
def apply_on_state(self, addresses_state):
    """Apply this token-creation transaction to the given address states.

    Credits each initial balance, deducts the fee from the sender, appends
    the txhash to every touched account exactly once (the *_processed flags
    prevent double-appending when owner/txfrom/slave address also appear in
    initial_balances), and bumps the signing address's nonce/OTS key.
    """
    addr_from_pk = getAddress('Q', self.PK).encode()
    owner_processed = False
    txfrom_processed = False
    addr_from_pk_processed = False
    for initial_balance in self.initial_balances:
        # Track which special addresses already received the txhash here.
        if initial_balance.address == self.owner:
            owner_processed = True
        if initial_balance.address == self.txfrom:
            txfrom_processed = True
        if initial_balance.address == addr_from_pk:
            addr_from_pk_processed = True
        if initial_balance.address in addresses_state:
            addresses_state[initial_balance.address].tokens[bin2hstr(self.txhash).encode()] += initial_balance.amount
            addresses_state[initial_balance.address].transaction_hashes.append(self.txhash)
    if self.owner in addresses_state and not owner_processed:
        addresses_state[self.owner].transaction_hashes.append(self.txhash)
    if self.txfrom in addresses_state:
        addresses_state[self.txfrom].balance -= self.fee
        if not txfrom_processed:
            addresses_state[self.txfrom].transaction_hashes.append(self.txhash)
    if addr_from_pk in addresses_state:
        if self.txfrom != addr_from_pk:
            if not addr_from_pk_processed:
                addresses_state[addr_from_pk].transaction_hashes.append(self.txhash)
        # Nonce/OTS bookkeeping always applies to the signing address.
        addresses_state[addr_from_pk].increase_nonce()
        self.set_ots_key(addresses_state[addr_from_pk], self.ots_key)
def __next__(self):
    """Process one transaction from the pending pool.

    Pops the next pending tx, validates it (signature, extended state and
    pool rules) and, on success, moves it into the main transaction pool and
    broadcasts it. Returns True on success, False on validation failure;
    raises StopIteration when nothing is pending or the pool is full.
    """
    if not self.transaction_pool_obj.pending_tx_pool:
        raise StopIteration
    if len(self.transaction_pool_obj.transaction_pool) >= config.dev.transaction_pool_size:
        raise StopIteration
    # pending entries are tuples; element 0 is the transaction itself
    tx = self.transaction_pool_obj.pending_tx_pool.pop(0)
    tx = tx[0]
    if not tx.validate():
        return False
    addr_from_state = self.state.get_address(address=tx.txfrom)
    addr_from_pk_state = addr_from_state
    # When signed by a slave key, extended validation runs against the
    # slave's own address state.
    addr_from_pk = Transaction.get_slave(tx)
    if addr_from_pk:
        addr_from_pk_state = self.state.get_address(address=addr_from_pk)
    is_valid_state = tx.validate_extended(addr_from_state=addr_from_state,
                                          addr_from_pk_state=addr_from_pk_state,
                                          transaction_pool=self.transaction_pool_obj.transaction_pool)
    is_valid_pool_state = tx.validate_transaction_pool(self.transaction_pool_obj.transaction_pool)
    if not (is_valid_state and is_valid_pool_state):
        logger.info('>>>TX %s failed state_validate', tx.txhash)
        return False
    logger.info('A TXN has been Processed %s', bin2hstr(tx.txhash))
    self.transaction_pool_obj.add_tx_to_pool(tx)
    self.broadcast_tx(tx)
    return True
def _validate_custom(self):
    """Validate TransferTokenTransaction-specific fields.

    :return: True when valid, False for invalid addresses
    :raises ValueError: on non-positive fee
    """
    if self.fee <= 0:
        # FIX: format the message eagerly; ValueError does not interpolate
        # logging-style (msg, args) tuples, so str(e) was a raw tuple repr.
        raise ValueError('TransferTokenTransaction [%s] Invalid Fee = %d' %
                         (bin2hstr(self.txhash), self.fee))
    if not (AddressState.address_is_valid(self.addr_from) and AddressState.address_is_valid(self.txto)):
        logger.warning('Invalid address addr_from: %s addr_to: %s', self.addr_from, self.txto)
        return False
    return True
def _validate_custom(self):
    """Validate transfer-specific fields.

    :return: True when valid, False for invalid addresses
    :raises ValueError: on non-positive amount
    """
    if self.amount <= 0:
        # FIX: format the message eagerly; ValueError does not interpolate
        # logging-style (msg, args) tuples, so str(e) was a raw tuple repr.
        raise ValueError('[%s] Invalid amount = %d' % (bin2hstr(self.txhash), self.amount))
    if not (AddressState.address_is_valid(self.addr_from) and AddressState.address_is_valid(self.txto)):
        logger.warning('Invalid address addr_from: %s addr_to: %s', self.addr_from, self.txto)
        return False
    return True
def get_tx_metadata(self, txhash: bytes):
    """Look up a transaction and its block number by tx hash.

    Returns (Transaction, block_number) or None when the record is missing
    or the DB read fails.
    """
    try:
        record = self._db.get(bin2hstr(txhash))
    except Exception:
        record = None
    if record is None:
        return None
    txn_json, block_number, _ = record
    return Transaction.from_json(txn_json), block_number
def apply_on_state(self, addresses_state):
    """Apply this token transfer to the given address states.

    Moves `amount` of the token from txfrom to txto, deducts the fee,
    records the txhash on every touched account, and bumps the signing
    address's nonce/OTS key.
    """
    # FIX: compute the token-map key once; the original re-derived
    # bin2hstr(...).encode() on four separate lines.
    token_key = bin2hstr(self.token_txhash).encode()
    if self.txfrom in addresses_state:
        sender = addresses_state[self.txfrom]
        sender.tokens[token_key] -= self.amount
        if sender.tokens[token_key] == 0:
            # Drop empty token entries to keep the state compact.
            del sender.tokens[token_key]
        sender.balance -= self.fee
        sender.transaction_hashes.append(self.txhash)
    if self.txto in addresses_state:
        if self.txfrom != self.txto:
            # Avoid double-appending on self-transfers.
            addresses_state[self.txto].transaction_hashes.append(self.txhash)
        addresses_state[self.txto].tokens[token_key] += self.amount
    addr_from_pk = getAddress('Q', self.PK).encode()
    if addr_from_pk in addresses_state:
        if self.txfrom != addr_from_pk:
            addresses_state[addr_from_pk].transaction_hashes.append(self.txhash)
        addresses_state[addr_from_pk].increase_nonce()
        self.set_ots_key(addresses_state[addr_from_pk], self.ots_key)
def get_hexseed(self) -> str:
    # FIXME: Move to property
    """
    Return the XMSS seed as a lowercase hex string.
    :return:
    :rtype:

    >>> from qrl.crypto.doctest_data import *; XMSS(4, xmss_test_seed1).get_hexseed()
    '303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030'
    >>> from qrl.crypto.doctest_data import *; XMSS(4, xmss_test_seed2).get_hexseed()
    '333133313331333133313331333133313331333133313331333133313331333133313331333133313331333133313331'
    """
    return bin2hstr(self._seed)
def _pre_check(self, block, ignore_duplicate):
    """Cheap sanity checks before attempting to add a block.

    Rejects genesis-or-lower block numbers, structurally invalid blocks and
    (unless ignore_duplicate) blocks already present in state.
    """
    if block.block_number < 1 or not block.validate():
        return False
    if not ignore_duplicate:
        # Duplicate block check
        if self.state.get_block(block.headerhash):
            logger.info('Duplicate block %s %s', block.block_number, bin2hstr(block.headerhash))
            return False
    return True
def tx_sign(ctx, src, txblob):
    """
    Sign a tx blob
    """
    # Decode the hex blob back into a protobuf Transaction.
    pbdata = qrl_pb2.Transaction()
    pbdata.ParseFromString(bytes(hstr2bin(txblob)))
    tx = Transaction.from_pbdata(pbdata)
    # Sign with the selected wallet's XMSS tree and re-emit as hex.
    _, address_xmss = _select_wallet(ctx, src)
    tx.sign(address_xmss)
    print(bin2hstr(tx.pbdata.SerializeToString()))
def push(self, headerhash: bytes, batch=None):
    """Commit the current working state as a new StateLoader for headerhash.

    Appends the loader to both the protobuf-backed list and the in-memory
    list; when the configured limit is exceeded, the oldest loader is
    evicted and promoted into the main state. Finally persists the loader
    index under 'state_objects'.
    """
    state_loader = StateLoader(state_code=bin2hstr(headerhash).encode(),
                               db=self._db)
    self._current_state.commit(state_loader)
    # Keep the serialized list and the live object list in lockstep.
    self._data.state_loaders.append(state_loader.state_code)
    self._state_loaders.append(state_loader)
    if len(self._state_loaders) > config.user.max_state_limit:
        # Evict the oldest loader and fold it into the main state.
        state_loader = self._state_loaders[0]
        del self._state_loaders[0]
        del self._data.state_loaders[0]
        state_loader.update_main()
    self._db.put_raw(b'state_objects', MessageToJson(self._data).encode(), batch)
def validate(self) -> bool:
    """Run validate_or_raise and translate any failure into False.

    Exists so legacy callers get a boolean instead of an exception:
    ValueError (expected validation failures) is logged at info/warning,
    anything else at exception level.
    """
    try:
        self.validate_or_raise()
        return True
    except ValueError as e:
        logger.info('[%s] failed validate_tx', bin2hstr(self.txhash))
        logger.warning(str(e))
    except Exception as e:
        logger.exception(e)
    return False
def pre_block_logic(self, block: Block):
    """Add a received block under the miner lock and react to the outcome.

    Restarts mining when the chain manager asks for it (or the miner died),
    and broadcasts the block to peers only when it was accepted.
    """
    logger.debug('Checking miner lock')
    with self._miner_lock:
        logger.debug('Inside add_block')
        result = self.chain_manager.add_block(block)
        logger.debug('trigger_miner %s', self.chain_manager.trigger_miner)
        logger.debug('is_syncing %s', self.p2p_factory.is_syncing())
        # Only (re)start mining when not syncing; mining during sync would
        # build on a stale tip.
        if not self.p2p_factory.is_syncing():
            if self.chain_manager.trigger_miner or not self.miner.isRunning():
                self.mine_next(self.chain_manager.last_block)
        if not result:
            logger.debug('Block Rejected %s %s', block.block_number, bin2hstr(block.headerhash))
            return
    # Broadcast outside the lock, on the next reactor tick.
    reactor.callLater(0, self.broadcast_block, block)
def update_tx_metadata(self, block, batch):
    """Persist per-transaction metadata for every transaction in *block*."""
    if not block.transactions:
        return
    # TODO (cyyber): Move To State Cache, instead of writing directly
    for pbdata in block.transactions:
        txn = Transaction.from_pbdata(pbdata)
        self._db.put(bin2hstr(txn.txhash),
                     [txn.to_json(), block.block_number, block.timestamp],
                     batch)
        # FIXME: Being updated without batch, need to fix,
        if txn.subtype == qrl_pb2.Transaction.TRANSFERTOKEN:
            self.update_token_metadata(txn)
        if txn.subtype == qrl_pb2.Transaction.TOKEN:
            self.create_token_metadata(txn)
        self.increase_txn_count(txn.txfrom)
    self.update_last_tx(block, batch)
def tx_prepare(ctx, src, dst, amount, fee, pk, otsidx):
    """
    Request a tx blob (unsigned) to transfer from src to dst (uses local wallet)
    """
    from decimal import Decimal  # local import: only needed for CLI conversion
    try:
        address_src, src_xmss = _select_wallet(ctx, src)
        if src_xmss:
            address_src_pk = src_xmss.pk()
            address_src_otsidx = src_xmss.get_index()
        else:
            address_src_pk = pk.encode()
            address_src_otsidx = int(otsidx)
        address_dst = dst.encode()
        # FIX: convert QRL -> shor with Decimal. The original
        # int(amount * 1.e9) truncated float error (e.g. 0.29 QRL became
        # 289999999 shor).
        amount_shor = int(Decimal(str(amount)) * (10 ** 9))
        fee_shor = int(Decimal(str(fee)) * (10 ** 9))
    except Exception as e:
        # FIX: surface the actual cause instead of a silent generic message.
        click.echo("Error validating arguments: {}".format(str(e)))
        quit(1)
    channel = grpc.insecure_channel(ctx.obj.node_public_address)
    stub = qrl_pb2_grpc.PublicAPIStub(channel)
    # FIXME: This could be problematic. Check
    transferCoinsReq = qrl_pb2.TransferCoinsReq(address_from=address_src,
                                                address_to=address_dst,
                                                amount=amount_shor,
                                                fee=fee_shor,
                                                xmss_pk=address_src_pk,
                                                xmss_ots_index=address_src_otsidx)
    try:
        transferCoinsResp = stub.TransferCoins(transferCoinsReq, timeout=5)
    except grpc.RpcError as e:
        click.echo(e.details())
        quit(1)
    except Exception as e:
        click.echo("Unhandled error: {}".format(str(e)))
        quit(1)
    txblob = bin2hstr(transferCoinsResp.transaction_unsigned.SerializeToString())
    print(txblob)
def block_received(self, source, block: Block):
    """Handle one block arriving during peer-sync download.

    Accepts the block only if it came from the peer we are syncing from,
    matches the next requested index, and its headerhash matches the
    headerhash list announced by the target node; then adds it to the chain
    and requests the next block.
    """
    self.pow.last_pb_time = time.time()
    logger.info('>>> Received Block #%d %s', block.block_number, bin2hstr(block.headerhash))
    # Only the peer we are actively syncing from may feed us blocks here.
    if source != self._target_peer:
        logger.warning('Received block from unexpected peer')
        logger.warning('Expected peer: %s', self._target_peer.connection_id)
        logger.warning('Found peer: %s', source.connection_id)
        return
    if block.block_number != self._last_requested_block_idx:
        logger.warning('Did not match %s', self._last_requested_block_idx)
        return
    # Cross-check against the headerhash list announced by the target node.
    target_start_blocknumber = self._target_node_header_hash.block_number
    expected_headerhash = self._target_node_header_hash.headerhashes[block.block_number - target_start_blocknumber]
    if block.headerhash != expected_headerhash:
        logger.warning('Did not match headerhash')
        logger.warning('Expected headerhash %s', expected_headerhash)
        logger.warning('Found headerhash %s', block.headerhash)
        return
    # FIXME: This check should not be necessary
    if not self._chain_manager.add_block(block):
        logger.warning('Failed to Add Block')
        return
    try:
        # Reset the stall-watchdog; block made progress.
        reactor.download_monitor.cancel()
    except Exception as e:
        logger.warning("PB: %s", e)
    if self.is_syncing_finished():
        return
    self._last_requested_block_idx += 1
    if self.is_syncing_finished():
        return
    self.peer_fetch_block()
def handle_block(self, source, message: qrllegacy_pb2.LegacyMessage):  # block received
    """
    Block
    This function processes any new block received.
    :return:
    """
    P2PBaseObserver._validate_message(message, qrllegacy_pb2.LegacyMessage.BK)
    try:
        block = Block(message.block)
    except Exception as e:
        logger.error('block rejected - unable to decode serialised data %s', source.peer_ip)
        logger.exception(e)
        return
    logger.info('>>>Received block from %s %s %s',
                source.connection_id,
                block.block_number,
                bin2hstr(block.headerhash))
    # Only act on blocks we actually asked this peer for.
    if source.factory.master_mr.isRequested(block.headerhash, source, block):
        source.factory.pow.pre_block_logic(block)  # FIXME: Ignores return value
        source.factory.master_mr.register(qrllegacy_pb2.LegacyMessage.BK, block.headerhash, message.block)
def test_create_hashchain(self):
    """hashchain() must produce the known chain for a fixed seed."""
    seed = sha256(b'test_seed')
    HASHCHAIN_SIZE = 100
    hcb = hashchain(seed, 1, HASHCHAIN_SIZE)
    self.assertIsNotNone(hcb)
    self.assertEqual(HASHCHAIN_SIZE + 1, len(hcb.hashchain))
    first = '127f5db0388cd82bd4af80b88e8d68409e1f70fd322f96b3f2aca55b0ade116f'
    terminator = '1d3b37dedc74980941b3b65640e8d2851658feac0d38196f372ada9c2ac0b077'
    # FIXME: Why seed comes as an array of tuples?
    self.assertEqual(first, bin2hstr(hcb.seed[0]))
    self.assertEqual(terminator, bin2hstr(hcb.hc_terminator))
    self.assertEqual(terminator, bin2hstr(hcb.hashchain[-1]))
    self.assertEqual(first, bin2hstr(hcb.hashchain[0]))
    self.assertNotEqual(first, bin2hstr(hcb.hashchain[1]))
    self.assertEqual('ff7f4850bc6499e08e104c6967ee66e665e57d7e0e429072e646d14e1b92600a',
                     bin2hstr(hcb.hashchain[50]))
def address_to_qaddress(address: bytes) -> str:
    """Convert a raw binary address into its textual 'Q...' form."""
    return 'Q{}'.format(bin2hstr(address))
def get_random_master():
    """Generate slave_data for a freshly created random master XMSS tree."""
    random_master = get_random_xmss(config.dev.xmss_tree_height)
    payload = [bin2hstr(random_master.address),
               [random_master.extended_seed],
               None]
    # JSON round-trip mirrors the on-disk/wire representation of slave data.
    slave_data = json.loads(json.dumps(payload))
    slave_data[0] = bytes(hstr2bin(slave_data[0]))
    return slave_data
def test_getMiniTransactionsByAddress(self, mock_dev_config):
    """GetMiniTransactionsByAddress must page txhashes newest-first.

    Builds 100 signed transfers from alice to bob, persists their paginated
    tx-hash index and metadata, then checks page 1 (10 per page) and page 3
    (8 per page) come back in reverse-creation order with a zero balance.
    """
    mock_dev_config.data_per_page = 5
    with set_qrl_dir('no_data'):
        db_state = State()
        p2p_factory = Mock(spec=P2PFactory)
        p2p_factory.pow = Mock(spec=POW)
        chain_manager = ChainManager(db_state)
        qrlnode = QRLNode(mining_address=b'')
        qrlnode.set_chain_manager(chain_manager)
        qrlnode._p2pfactory = p2p_factory
        qrlnode._pow = p2p_factory.pow
        qrlnode._peer_addresses = ['127.0.0.1', '192.168.1.1']
        service = PublicAPIService(qrlnode)
        # Find a transaction
        alice_xmss = get_alice_xmss(8)
        context = Mock(spec=ServicerContext)
        bob_xmss = get_bob_xmss(4)
        txs = []
        for i in range(0, 100):
            tx = TransferTransaction.create(addrs_to=[bob_xmss.address],
                                            amounts=[10],
                                            message_data=None,
                                            fee=0,
                                            xmss_pk=alice_xmss.pk)
            tx.sign(alice_xmss)
            txs.append(tx)
        addresses_set = {bob_xmss.address, alice_xmss.address}
        block = Block.create(config.dev, 10, b'', 100000000, txs, alice_xmss.address,
                             seed_height=0, seed_hash=None)
        state_container = chain_manager.new_state_container(addresses_set, 5, True, None)
        # Index every tx hash for both sender and recipient address states.
        for tx in txs:
            chain_manager.update_state_container(tx, state_container)
            address_state = state_container.addresses_state[tx.addr_from]
            state_container.paginated_tx_hash.insert(address_state, tx.txhash)
            address_state = state_container.addresses_state[tx.addrs_to[0]]
            state_container.paginated_tx_hash.insert(address_state, tx.txhash)
        state_container.paginated_tx_hash.put_paginated_data(None)
        OptimizedAddressState.put_optimized_addresses_state(db_state, state_container.addresses_state)
        TransactionMetadata.update_tx_metadata(db_state, block, None)
        # Page 1: the 10 most recent transactions, newest first.
        request = qrl_pb2.GetMiniTransactionsByAddressReq(address=alice_xmss.address,
                                                          item_per_page=10,
                                                          page_number=1)
        response = service.GetMiniTransactionsByAddress(request=request, context=context)
        context.set_code.assert_not_called()
        self.assertEqual(len(response.mini_transactions), 10)
        for i in range(0, 10):
            self.assertEqual(bin2hstr(txs[len(txs) - i - 1].txhash),
                             response.mini_transactions[i].transaction_hash)
        self.assertEqual(response.balance, 0)
        TransactionMetadata.update_tx_metadata(db_state, block, None)
        # Page 3 at 8 per page: items 17..24 from the end.
        request = qrl_pb2.GetMiniTransactionsByAddressReq(address=alice_xmss.address,
                                                          item_per_page=8,
                                                          page_number=3)
        response = service.GetMiniTransactionsByAddress(request=request, context=context)
        context.set_code.assert_not_called()
        self.assertEqual(len(response.mini_transactions), 8)
        for i in range(0, 8):
            self.assertEqual(bin2hstr(txs[len(txs) - i - 17].txhash),
                             response.mini_transactions[i].transaction_hash)
        self.assertEqual(response.balance, 0)
def update_token_balance(self, token_tx_hash: bytes, balance: int):
    """Add *balance* (may be negative) to the token entry, dropping it at zero."""
    key = bin2hstr(token_tx_hash)
    self._data.tokens[key] += balance
    if not self._data.tokens[key]:
        del self._data.tokens[key]
def ST(self, data):
    """
    Stake Transaction
    This function processes whenever a Transaction having subtype ST is received.
    :return:
    """
    try:
        st = Transaction.from_json(data)
    except Exception as e:
        logger.error(
            'st rejected - unable to decode serialised data - closing connection'
        )
        logger.exception(e)
        self.transport.loseConnection()
        return
    # Drop duplicates we already requested from another peer.
    if not self.factory.master_mr.isRequested(st.get_message_hash(), self):
        return
    # Fresh chain (genesis only): refuse activations beyond the first epoch.
    if len(self.factory.buffered_chain._chain.blockchain) == 1 and \
            st.activation_blocknumber > self.factory.buffered_chain.height + config.dev.blocks_per_epoch:
        return
    height = self.factory.buffered_chain.height + 1
    stake_validators_tracker = self.factory.buffered_chain.get_stake_validators_tracker(
        height)
    if st.txfrom in stake_validators_tracker.future_stake_addresses:
        logger.debug(
            'P2P dropping st as staker is already in future_stake_address %s',
            st.txfrom)
        return
    if st.txfrom in stake_validators_tracker.sv_dict:
        # Already an active validator: only accept if the current stake
        # would have expired before the new activation.
        expiry = stake_validators_tracker.sv_dict[
            st.txfrom].activation_blocknumber + config.dev.blocks_per_epoch
        if st.activation_blocknumber < expiry:
            logger.debug(
                'P2P dropping st txn as it is already active for the given range %s',
                st.txfrom)
            return
    if st.activation_blocknumber > height + config.dev.blocks_per_epoch:
        logger.debug(
            'P2P dropping st as activation_blocknumber beyond limit')
        # NOTE(review): returns False while every other exit returns None;
        # callers appear to ignore the value — confirm before changing.
        return False
    # Skip if an identical ST is already pooled.
    for t in self.factory.buffered_chain.tx_pool.transaction_pool:
        if st.get_message_hash() == t.get_message_hash():
            return
    tx_state = self.factory.buffered_chain.get_stxn_state(
        blocknumber=self.factory.buffered_chain.height + 1,
        addr=st.txfrom)
    if st.validate() and st.validate_extended(tx_state=tx_state):
        self.factory.buffered_chain.tx_pool.add_tx_to_pool(st)
    else:
        logger.warning('>>>ST %s invalid state validation failed..',
                       bin2hstr(tuple(st.hash)))
        return
    # Relay the validated ST to the rest of the network.
    self.factory.register_and_broadcast('ST', st.get_message_hash(), st.to_json())
def get_token_balance(self, token_tx_hash: bytes) -> int:
    """Return the held balance for the given token, or 0 when not held."""
    return self._data.tokens.get(bin2hstr(token_tx_hash), 0)
def validate_extended(self, addr_from_state: AddressState, addr_from_pk_state: AddressState):
    """Stateful validation for a token-creation transaction.

    Checks slave permission, address validity for sender/owner/all initial
    balances, sufficient balance for the fee, and OTS key freshness.
    """
    if not self.validate_slave(addr_from_state, addr_from_pk_state):
        return False
    if not AddressState.address_is_valid(self.addr_from):
        logger.warning('Invalid address addr_from: %s', self.addr_from)
        return False
    if not AddressState.address_is_valid(self.owner):
        logger.warning('Invalid address owner_addr: %s', self.owner)
        return False
    for address_balance in self.initial_balances:
        if not AddressState.address_is_valid(address_balance.address):
            logger.warning('Invalid address address in initial_balances: %s',
                           address_balance.address)
            return False
    balance = addr_from_state.balance
    if balance < self.fee:
        logger.info('TokenTxn State validation failed for %s because: Insufficient funds',
                    bin2hstr(self.txhash))
        logger.info('balance: %s, Fee: %s', balance, self.fee)
        return False
    if addr_from_pk_state.ots_key_reuse(self.ots_key):
        logger.info('TokenTxn State validation failed for %s because: OTS Public key re-use detected',
                    bin2hstr(self.txhash))
        return False
    return True
def validate_extended(self, addr_from_state: AddressState, addr_from_pk_state: AddressState):
    """Stateful validation for a transfer: slave rights, funds, OTS freshness."""
    if not self.validate_slave(addr_from_state, addr_from_pk_state):
        return False
    balance = addr_from_state.balance
    required = self.total_amount + self.fee
    if balance < required:
        logger.info('State validation failed for %s because: Insufficient funds',
                    bin2hstr(self.txhash))
        logger.info('balance: %s, fee: %s, amount: %s', balance, self.fee, self.total_amount)
        return False
    if addr_from_pk_state.ots_key_reuse(self.ots_key):
        logger.info('State validation failed for %s because: OTS Public key re-use detected',
                    bin2hstr(self.txhash))
        return False
    return True
def _validate_extended(self, state_container: StateContainer):
    """Stateful validation for a MultiSigSpend transaction.

    Verifies hard-fork activation, output limits, presence and consistency
    of spender and multi-sig address states, fee/amount coverage, expiry
    and signatory membership.
    """
    if state_container.block_number < state_container.current_dev_config.hard_fork_heights[0]:
        logger.warning("[MultiSigSpend] Hard Fork Feature not yet activated")
        return False
    if len(self.addrs_to) > state_container.current_dev_config.transaction_multi_output_limit:
        logger.warning('[MultiSigSpend] Number of addresses exceeds max limit')
        logger.warning('Number of addresses %s', len(self.addrs_to))
        logger.warning('Number of amounts %s', len(self.amounts))
        return False
    addr_from_state = state_container.addresses_state[self.addr_from]
    if self.multi_sig_address not in state_container.addresses_state:
        logger.error("[MultiSigSpend] Multi Sig address state not found in state_container %s",
                     self.multi_sig_address)
        return False
    multi_sig_address_state = state_container.addresses_state[self.multi_sig_address]
    block_number = state_container.block_number
    if addr_from_state.address != self.addr_from:
        logger.error("[MultiSigSpend] Unexpected addr_from_state")
        logger.error("Expecting State for address %s, but got state for address %s",
                     bin2hstr(self.addr_from),
                     bin2hstr(addr_from_state.address))
        return False
    if multi_sig_address_state.address != self.multi_sig_address:
        logger.error("[MultiSigSpend] Unexpected multi sig address state")
        logger.error("Expecting State for address %s, but got state for address %s",
                     bin2hstr(self.multi_sig_address),
                     bin2hstr(multi_sig_address_state.address))
        return False
    tx_balance = addr_from_state.balance
    total_amount = self.total_amount
    if tx_balance < self.fee:
        logger.info('[MultiSigSpend] State validation failed for %s because: Insufficient funds',
                    bin2hstr(self.txhash))
        logger.info('address: %s, balance: %s, fee: %s',
                    bin2hstr(self.addr_from), tx_balance, self.fee)
        return False
    if multi_sig_address_state.balance < total_amount:
        logger.info('[MultiSigSpend] State validation failed for %s because: Insufficient funds',
                    bin2hstr(self.txhash))
        # FIX: log the multi-sig account's balance and the spend amount —
        # the original logged the spender's balance and fee, which is not
        # what this branch checks.
        logger.info('address: %s, balance: %s, amount: %s',
                    bin2hstr(self.multi_sig_address),
                    multi_sig_address_state.balance,
                    total_amount)
        return False
    # Multi Sig Spend considered to be expired after block having block number equals to
    # self.expiry_block_number gets added into the main chain
    if self.expiry_block_number <= block_number:
        # FIX: the original format string had no %s, so the txhash argument
        # was dropped (logging raised a formatting error internally).
        logger.info('[MultiSigSpend] State validation failed for %s due to invalid expiry_block_number',
                    bin2hstr(self.txhash))
        logger.info('Chain Height: %s, Expiry Block Number: %s',
                    block_number, self.expiry_block_number)
        return False
    if self.addr_from not in multi_sig_address_state.signatories:
        # FIX: same missing-placeholder defect as above.
        logger.info('[MultiSigSpend] Address %s is not in the signatories list',
                    bin2hstr(self.addr_from))
        return False
    return True
def test_transferCoins_sign(self):
    """End-to-end: request an unsigned transfer blob, hash it, sign it,
    push it, and verify the service-computed tx hash matches the known
    value for the fixed alice/bob test wallets."""
    with set_data_dir('no_data'):
        with State() as db_state:
            with set_wallet_dir("test_wallet"):
                p2p_factory = Mock(spec=P2PFactory)
                p2p_factory.pow = Mock(spec=POW)
                chain_manager = ChainManager(db_state)
                qrlnode = QRLNode(db_state, mining_credit_wallet=b'')
                qrlnode.set_chain_manager(chain_manager)
                qrlnode._p2pfactory = p2p_factory
                qrlnode._pow = p2p_factory.pow
                qrlnode._peer_addresses = ['127.0.0.1', '192.168.1.1']
                service = PublicAPIService(qrlnode)
                context = Mock(spec=ServicerContext)
                alice = get_alice_xmss()
                bob = get_bob_xmss()
                request = qrl_pb2.TransferCoinsReq(
                    addresses_to=[bob.address],
                    amounts=[101],
                    fee=12,
                    xmss_pk=alice.pk)
                response = service.TransferCoins(request=request, context=context)
                context.set_code.assert_not_called()
                context.set_details.assert_not_called()
                self.assertIsNotNone(response)
                self.assertIsNotNone(
                    response.extended_transaction_unsigned.tx)
                self.assertEqual(
                    'transfer',
                    response.extended_transaction_unsigned.tx.WhichOneof(
                        'transactionType'))
                self.assertEqual(
                    12, response.extended_transaction_unsigned.tx.fee)
                self.assertEqual(
                    alice.pk,
                    response.extended_transaction_unsigned.tx.public_key)
                # Unsigned blob: nonce, signature and hash must all be empty.
                self.assertEqual(
                    0, response.extended_transaction_unsigned.tx.nonce)
                self.assertEqual(
                    b'', response.extended_transaction_unsigned.tx.signature)
                self.assertEqual(
                    b'', response.extended_transaction_unsigned.tx.
                    transaction_hash)
                self.assertEqual(
                    bob.address,
                    response.extended_transaction_unsigned.tx.
                    transfer.addrs_to[0])
                self.assertEqual(
                    101,
                    response.extended_transaction_unsigned.tx.
                    transfer.amounts[0])
                # Rebuild the signing pre-image exactly as the node does:
                # source address || fee || destination || amount.
                tmp_hash_pre = bytes(
                    QRLHelper.getAddress(
                        response.extended_transaction_unsigned.tx.
                        public_key))
                tmp_hash_pre += str(response.extended_transaction_unsigned.
                                    tx.fee).encode()
                tmp_hash_pre += response.extended_transaction_unsigned.tx.transfer.addrs_to[
                    0]
                tmp_hash_pre += str(response.extended_transaction_unsigned.
                                    tx.transfer.amounts[0]).encode()
                self.assertEqual(
                    '010300a1da274e68c88b0ccf448e0b1916fa789b01eb2ed4e9ad565ce264c939078'
                    '2a9c61ac02f31320103001d65d7e59aed5efbeae64246e0f3184d7c42411421eb38'
                    '5ba30f2c1c005a85ebc4419cfd313031',
                    bin2hstr(tmp_hash_pre))
                tmp_hash = sha256(tmp_hash_pre)
                self.assertEqual(
                    '3645f2819aba65479f9a7fad3f5d7a41a9357410a595fa02fb947bfe3ed96e0f',
                    bin2hstr(tmp_hash))
                signed_transaction = response.extended_transaction_unsigned.tx
                signed_transaction.signature = alice.sign(tmp_hash)
                req_push = qrl_pb2.PushTransactionReq(
                    transaction_signed=signed_transaction)
                resp_push = service.PushTransaction(req_push, context=context)
                context.set_code.assert_not_called()
                context.set_details.assert_not_called()
                self.assertIsNotNone(resp_push)
                self.assertEqual(qrl_pb2.PushTransactionResp.SUBMITTED,
                                 resp_push.error_code)
                self.assertEqual(
                    '30955fdc5e2d9dbe5fb9bf812f2e1b6c4b409a8a7c7a75f1c3e9ba1ffdd8e60e',
                    bin2hstr(resp_push.tx_hash))
def put_block(self, block, batch):
    """Store the block's JSON under its hex-encoded headerhash key."""
    key = bin2hstr(block.headerhash).encode()
    self._db.put_raw(key, block.to_json().encode(), batch)
def main():
    """Node entry point: parse args, configure paths/network, bootstrap the
    chain manager and services, then hand control to the Twisted reactor."""
    args = parse_arguments()
    qrl_dir_post_fix = ''
    copy_files = []
    if args.network_type == 'testnet':
        # Hard Fork Block Height For Testnet
        config.dev.hard_fork_heights[0] = config.dev.testnet_hard_fork_heights[0]
        qrl_dir_post_fix = '-testnet'
        package_directory = os.path.dirname(os.path.abspath(__file__))
        copy_files.append(os.path.join(package_directory, 'network/testnet/genesis.yml'))
        copy_files.append(os.path.join(package_directory, 'network/testnet/config.yml'))
    logger.debug("=====================================================================================")
    logger.info("QRL Path: %s", args.qrl_dir)
    config.user.qrl_dir = os.path.expanduser(os.path.normpath(args.qrl_dir) + qrl_dir_post_fix)
    config.create_path(config.user.qrl_dir, copy_files)
    # User YAML overrides take effect before reading thread-count default.
    config.user.load_yaml(config.user.config_path)
    if args.mining_thread_count is None:
        args.mining_thread_count = config.user.mining_thread_count
    logger.debug("=====================================================================================")
    config.create_path(config.user.wallet_dir)
    mining_address = None
    ntp.setDrift()
    logger.info('Initializing chain..')
    persistent_state = State()
    if args.mocknet:
        # Mocknet: single-threaded forced mining with 1s blocks for testing.
        args.debug = True
        config.user.mining_enabled = True
        config.user.mining_thread_count = 1
        config.user.mining_pause = 500
        config.dev.pbdata.block.block_timing_in_seconds = 1
        config.user.genesis_difficulty = 2
        # Mocknet mining address
        # Q01050058bb3f8cb66fd90d0347478e5bdf3a475e82cfc5fe5dc276500ca21531e6edaf3d2d0f7e
        # Mocknet mining hexseed
        # 010500dd70f898c2cb4c11ce7fd85aa04554e41dcc46569871d189a3f48d84e2fbedbe176695e291e9b81e619b3625c624cde6
        args.mining_address = 'Q01050058bb3f8cb66fd90d0347478e5bdf3a475e82cfc5fe5dc276500ca21531e6edaf3d2d0f7e'
    if args.debug:
        logger.warning("FAULT HANDLER ENABLED")
        faulthandler.enable()
    if config.user.mining_enabled:
        mining_address = get_mining_address(args.mining_address)
        if not mining_address:
            logger.warning('Invalid Mining Credit Wallet Address')
            logger.warning('%s', args.mining_address)
            return False
    chain_manager = ChainManager(state=persistent_state)
    if args.measurement > -1:
        # Test hook: pin the difficulty measurement to a fixed value.
        chain_manager.get_measurement = MagicMock(return_value=args.measurement)
    chain_manager.load(Block.deserialize(GenesisBlock().serialize()))
    qrlnode = QRLNode(mining_address=mining_address)
    qrlnode.set_chain_manager(chain_manager)
    set_logger.set_logger(args, qrlnode.sync_state)
    #######
    # NOTE: Keep assigned to a variable or might get collected
    admin_service, grpc_service, mining_service, debug_service = start_services(qrlnode)
    qrlnode.start_listening()
    qrlnode.start_pow(args.mining_thread_count)
    logger.info('QRL blockchain ledger %s', config.dev.version)
    if config.user.mining_enabled:
        logger.info('Mining/staking address %s using %s threads (0 = auto)',
                    'Q' + bin2hstr(mining_address), args.mining_thread_count)
    elif args.mining_address or args.mining_thread_count:
        logger.warning('Mining is not enabled but you sent some "mining related" param via CLI')
    reactor.run()
def contains(self, headerhash: bytes) -> bool:
    """Return True when a tracked state loader matches the header hash."""
    target = bin2hstr(headerhash).encode()
    return any(loader.state_code == target for loader in self._state_loaders)
def send_tx_to_peers(self, tx):
    """Broadcast a transaction to connected peers."""
    logger.info('<<<Transmitting TX: %s', bin2hstr(tx.txhash))
    self.register_and_broadcast('TX', tx.get_message_hash(), tx.transaction_to_json())
def validate_transaction_pool(self, transaction_pool):
    """Return False if another pooled transaction signed by the same public
    key reuses this transaction's OTS key index; True otherwise."""
    for entry in transaction_pool:
        other = entry[1].transaction
        if other.txhash == self.txhash:
            continue
        if other.PK != self.PK:
            continue
        if other.ots_key == self.ots_key:
            logger.info('State validation failed for %s because: OTS Public key re-use detected',
                        bin2hstr(self.txhash))
            logger.info('Subtype %s', type(self))
            return False
    return True
def get_ots_from_signature(signature):
    """Extract the OTS key index encoded in the first 4 bytes of an XMSS signature.

    Raises:
        ValueError: when those bytes cannot be parsed as a hex integer.
    """
    try:
        ots_index = int(bin2hstr(signature)[:8], 16)
    except ValueError:
        raise ValueError(
            'OTS Key Index: First 4 bytes of signature are invalid')
    return ots_index
def pre_pos_2(self, data=None):
    """Bootstrap phase 2 of genesis PoS: collect genesis stake transactions,
    rank stakers, and either create block 1 (if we are the designated
    staker) or wait for it. Reschedules itself via reactor.callLater until
    enough stakers / consensus are available."""
    logger.info('pre_pos_2')
    if self.buffered_chain.height >= 1:
        # Chain already past genesis; nothing to bootstrap.
        return
    # assign hash terminators to addresses and generate a temporary stake list ordered by st.hash..
    tmp_list = []
    seed_list = []
    genesis_block = self.buffered_chain.get_block(0)
    total_genesis_stake_amount = 0
    for tx in self.buffered_chain.tx_pool.transaction_pool:
        tx.pbdata.nonce = 1
        if tx.subtype == qrl_pb2.Transaction.STAKE:
            # Only stake txs from addresses funded in the genesis block count.
            for genesisBalance in genesis_block.genesis_balance:
                if tx.txfrom == genesisBalance.address.encode() and tx.activation_blocknumber == 1:
                    tmp_list.append([tx.txfrom, tx.hash, 0, genesisBalance.balance, tx.slave_public_key])
                    seed_list.append(tx.hash)
                    # FIXME: This goes to stake validator list without verification, Security Risk
                    self.buffered_chain._chain.pstate.stake_validators_tracker.add_sv(genesisBalance.balance, tx, 1)
                    total_genesis_stake_amount += genesisBalance.balance

    self.buffered_chain.epoch_seed = calc_seed(seed_list)

    # TODO : Needed to be reviewed later
    # Rank stakers by score over (address, reveal, balance, seed).
    self.buffered_chain.stake_list = sorted(
        tmp_list,
        key=lambda staker: score(
            stake_address=staker[0],
            reveal_one=bin2hstr(sha256(str(reduce(lambda set1, set2: set1 + set2, tuple(staker[1]))).encode())),
            balance=staker[3],
            seed=self.buffered_chain.epoch_seed))

    # self.buffered_chain.epoch_seed = format(self.buffered_chain.epoch_seed, 'x')  # FIXME: Why hex string?

    logger.info('genesis stakers ready = %s / %s',
                len(self.buffered_chain.stake_list),
                config.dev.minimum_required_stakers)
    logger.info('node address: %s', self.buffered_chain.staking_address)

    # stake pool still not full..reloop..
    if len(self.buffered_chain.stake_list) < config.dev.minimum_required_stakers:
        self.p2p_factory.broadcast_st(data)
        logger.info('waiting for stakers.. retry in 5s')
        reactor.callID = reactor.callLater(5, self.pre_pos_2, data)
        return

    voteMetadata = self.buffered_chain.get_consensus(0)
    consensus_ratio = voteMetadata.total_stake_amount / total_genesis_stake_amount
    if consensus_ratio < 0.51:
        # NOTE(review): '%%' with no logging args prints literally as '51%%' — confirm intent.
        logger.info('Consensus lower than 51%%.. retry in 5s')
        reactor.callID = reactor.callLater(5, self.pre_pos_2, data)
        return

    if self.buffered_chain.staking_address == self.buffered_chain.stake_list[0][0]:
        # We are the top-ranked staker: build block 1 ourselves.
        logger.info('designated to create block 1: building block..')
        tmphc = hashchain(self.buffered_chain.wallet.address_bundle[0].xmss.get_seed_private())
        # create the genesis block 2 here..
        reveal_hash = self.buffered_chain.select_hashchain(
            self.buffered_chain.staking_address, tmphc.hashchain, blocknumber=1)
        b = self.buffered_chain.create_block(reveal_hash[-2])  # FIXME: This is incorrect, rewire
        self.pre_block_logic(b)  # FIXME: Ignore return value?
    else:
        logger.info('await block creation by stake validator: %s',
                    self.buffered_chain.stake_list[0][0])
        self.last_bk_time = time.time()
        self.restart_unsynced_logic()
def MR(self, data):
    """
    Message Receipt

    This function accepts message receipt from peer,
    checks if the message hash already been received or not.
    In case its a already received message, it is ignored.
    Otherwise the request is made to get the full message.
    :param data: serialized qrl_pb2.MR protobuf received from the peer
    :return:
    """
    mr_data = qrl_pb2.MR()
    try:
        Parse(data, mr_data)
    except Exception as e:  # Disconnect peer not following protocol
        logger.debug('Disconnected peer %s not following protocol in MR %s',
                     self.conn_identity, e)
        self.transport.loseConnection()

    msg_hash = mr_data.hash

    if mr_data.type not in MessageReceipt.allowed_types:
        return

    # Transactions are only accepted once synced.
    if mr_data.type in ['TX'] and self.factory.sync_state.state != ESyncState.synced:
        return

    if mr_data.type == 'TX':
        if len(self.factory.buffered_chain.tx_pool.pending_tx_pool) >= config.dev.transaction_pool_size:
            logger.warning('TX pool size full, incoming tx dropped. mr hash: %s',
                           bin2hstr(msg_hash))
            return

    if mr_data.type == 'ST' or mr_data.type == 'VT':
        if self.factory.buffered_chain.height > 1 and self.factory.sync_state.state != ESyncState.synced:
            return

    if self.factory.master_mr.contains(msg_hash, mr_data.type):
        # Full message already received; nothing to request.
        return

    self.factory.master_mr.add_peer(msg_hash, mr_data.type, self, mr_data)

    if self.factory.master_mr.is_callLater_active(msg_hash):  # Ignore if already requested
        return

    if mr_data.type == 'BK':
        block_chain_buffer = self.factory.buffered_chain

        if not block_chain_buffer.verify_BK_hash(mr_data, self.conn_identity):
            # Unverifiable block hash: only fetch it if it looks like a known duplicate slot.
            if block_chain_buffer.is_duplicate_block(block_idx=mr_data.block_number,
                                                     prev_headerhash=mr_data.prev_headerhash,
                                                     stake_selector=mr_data.stake_selector):
                self.factory.RFM(mr_data)
            return

        blocknumber = mr_data.block_number
        target_blocknumber = block_chain_buffer.bkmr_tracking_blocknumber(self.factory.ntp)
        if target_blocknumber != self.factory.bkmr_blocknumber:
            # Tracking moved on: reset the priority queue for the new height.
            self.factory.bkmr_blocknumber = target_blocknumber
            del self.factory.bkmr_priorityq
            self.factory.bkmr_priorityq = PriorityQueue()

        if blocknumber != target_blocknumber or blocknumber == 1:
            self.factory.RFM(mr_data)
            return

        # Queue candidate blocks by score; the best one is requested later.
        score = block_chain_buffer.score_BK_hash(mr_data)
        self.factory.bkmr_priorityq.put((score, msg_hash))

        if not self.factory.bkmr_processor.active():
            self.factory.bkmr_processor = reactor.callLater(1, self.factory.select_best_bkmr)

        return

    self.factory.RFM(mr_data)
def test_getAddress(self):
    """An address derived from a fixed seed must be deterministic."""
    expected_hex = '00020080a24d25a75c99077719c6b5077b0ae16cf243f69d142e848075e985dbb28df7fbcd5acf'
    derived = helper.qrladdress('mySeed')
    self.assertEqual(expected_hex, bin2hstr(derived))
def is_token_exists(self, token_tx_hash: bytes) -> bool:
    """Return True when a token created by the given tx hash is tracked
    in this state's token map (keys are hex-encoded hashes)."""
    return bin2hstr(token_tx_hash) in self._data.tokens
def create_block(self, last_block, mining_nonce, tx_pool, signing_xmss, master_address) -> Optional[Block]:
    """Assemble a candidate block on top of ``last_block``.

    Transactions that do not fit within the block size limit are left in
    place for a later block; transactions that fail validation (OTS key
    reuse, insufficient balance or token holdings) are removed from the
    working pool. Valid transactions are applied to a local copy of the
    address state and re-added to ``tx_pool``.

    :return: the newly built Block (never raises for invalid txs).
    """
    # TODO: Persistence will move to rocksdb
    # FIXME: Difference between this and create block?????????????
    # FIXME: Break encapsulation
    if not self._dummy_xmss:
        self._dummy_xmss = Wallet.get_new_address(signature_tree_height=signing_xmss.height)

    # Dummy block exists only to measure the base (empty) block size.
    dummy_block = Block.create(block_number=last_block.block_number + 1,
                               prevblock_headerhash=last_block.headerhash,
                               transactions=[],
                               signing_xmss=self._dummy_xmss.xmss,
                               master_address=master_address,
                               nonce=0)
    dummy_block.set_mining_nonce(mining_nonce)

    t_pool2 = copy.deepcopy(tx_pool.transaction_pool)
    del tx_pool.transaction_pool[:]

    ######
    # recreate the transaction pool as in the tx_hash_list, ordered by txhash..
    total_txn = len(t_pool2)
    txnum = 0
    addresses_set = set()
    while txnum < total_txn:
        tx = t_pool2[txnum]
        tx.set_effected_address(addresses_set)
        txnum += 1

    addresses_state = dict()
    for address in addresses_set:
        addresses_state[address] = self.state.get_address(address)

    block_size = dummy_block.size
    block_size_limit = self.state.get_block_size_limit(last_block)

    txnum = 0
    while txnum < total_txn:
        tx = t_pool2[txnum]

        # Skip Transactions for later, which doesn't fit into block
        if block_size + tx.size + config.dev.tx_extra_overhead > block_size_limit:
            txnum += 1
            continue

        addr_from_pk_state = addresses_state[tx.addr_from]
        addr_from_pk = Transaction.get_slave(tx)
        if addr_from_pk:
            addr_from_pk_state = addresses_state[addr_from_pk]

        if addr_from_pk_state.ots_key_reuse(tx.ots_key):
            del t_pool2[txnum]
            total_txn -= 1
            continue

        if isinstance(tx, TransferTransaction):
            if addresses_state[tx.addr_from].balance < tx.total_amount + tx.fee:
                logger.warning('%s %s exceeds balance, invalid tx', tx, tx.addr_from)
                logger.warning('type: %s', tx.type)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.addr_from].balance, tx.total_amount)
                del t_pool2[txnum]
                total_txn -= 1
                continue

        if isinstance(tx, MessageTransaction):
            if addresses_state[tx.addr_from].balance < tx.fee:
                logger.warning('%s %s exceeds balance, invalid message tx', tx, tx.addr_from)
                logger.warning('type: %s', tx.type)
                logger.warning('Buffer State Balance: %s Free %s',
                               addresses_state[tx.addr_from].balance, tx.fee)
                # BUGFIX: this branch previously decremented total_txn without
                # deleting the invalid tx, so the same tx was re-examined and
                # trailing transactions were silently dropped from the block.
                del t_pool2[txnum]
                total_txn -= 1
                continue

        if isinstance(tx, TokenTransaction):
            if addresses_state[tx.addr_from].balance < tx.fee:
                logger.warning('%s %s exceeds balance, invalid tx', tx, tx.addr_from)
                logger.warning('type: %s', tx.type)
                logger.warning('Buffer State Balance: %s Fee %s',
                               addresses_state[tx.addr_from].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue

        if isinstance(tx, TransferTokenTransaction):
            if addresses_state[tx.addr_from].balance < tx.fee:
                logger.warning('%s %s exceeds balance, invalid tx', tx, tx.addr_from)
                logger.warning('type: %s', tx.type)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.addr_from].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue
            if bin2hstr(tx.token_txhash).encode() not in addresses_state[tx.addr_from].tokens:
                logger.warning('%s doesnt own any token with token_txnhash %s',
                               tx.addr_from, bin2hstr(tx.token_txhash).encode())
                del t_pool2[txnum]
                total_txn -= 1
                continue
            if addresses_state[tx.addr_from].tokens[bin2hstr(tx.token_txhash).encode()] < tx.total_amount:
                logger.warning('Token Transfer amount exceeds available token')
                logger.warning('Token Txhash %s', bin2hstr(tx.token_txhash).encode())
                logger.warning('Available Token Amount %s',
                               addresses_state[tx.addr_from].tokens[bin2hstr(tx.token_txhash).encode()])
                logger.warning('Transaction Amount %s', tx.total_amount)
                del t_pool2[txnum]
                total_txn -= 1
                continue

        if isinstance(tx, LatticePublicKey):
            if addresses_state[tx.addr_from].balance < tx.fee:
                logger.warning('Lattice TXN %s %s exceeds balance, invalid tx', tx, tx.addr_from)
                logger.warning('type: %s', tx.type)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.addr_from].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue

        if isinstance(tx, SlaveTransaction):
            if addresses_state[tx.addr_from].balance < tx.fee:
                logger.warning('Slave TXN %s %s exceeds balance, invalid tx', tx, tx.addr_from)
                logger.warning('type: %s', tx.type)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.addr_from].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue

        tx.apply_on_state(addresses_state)
        tx_pool.add_tx_to_pool(tx)
        tx._data.nonce = addresses_state[tx.addr_from].nonce
        txnum += 1
        block_size += tx.size + config.dev.tx_extra_overhead

    coinbase_nonce = self.state.get_address(signing_xmss.address).nonce
    if signing_xmss.address in addresses_state:
        coinbase_nonce = addresses_state[signing_xmss.address].nonce + 1

    block = Block.create(block_number=last_block.block_number + 1,
                         prevblock_headerhash=last_block.headerhash,
                         transactions=t_pool2,
                         signing_xmss=signing_xmss,
                         master_address=master_address,
                         nonce=coinbase_nonce)
    return block
def create_block(self, last_block, mining_nonce, tx_pool, signing_xmss, master_address) -> Optional[Block]:
    """Assemble a candidate block on top of ``last_block`` (legacy subtype-based variant).

    Oversized transactions are deferred; transactions that fail validation
    (OTS key reuse, insufficient balance or token holdings) are removed
    from the working pool. Valid ones are applied to a local copy of the
    address state and re-added to ``tx_pool``.

    :return: the newly built Block.
    """
    # TODO: Persistence will move to rocksdb
    # FIXME: Difference between this and create block?????????????
    # FIXME: Break encapsulation
    # Dummy block exists only to measure the base (empty) block size.
    dummy_block = Block.create(mining_nonce=mining_nonce,
                               block_number=last_block.block_number + 1,
                               prevblock_headerhash=last_block.headerhash,
                               transactions=[],
                               signing_xmss=signing_xmss,
                               master_address=master_address,
                               nonce=0)
    dummy_block.set_mining_nonce(mining_nonce)
    # Rewind the OTS index consumed by the dummy signature.
    signing_xmss.set_index(signing_xmss.get_index() - 1)

    t_pool2 = copy.deepcopy(tx_pool.transaction_pool)
    del tx_pool.transaction_pool[:]

    ######
    # recreate the transaction pool as in the tx_hash_list, ordered by txhash..
    total_txn = len(t_pool2)
    txnum = 0
    addresses_set = set()
    while txnum < total_txn:
        tx = t_pool2[txnum]
        tx.set_effected_address(addresses_set)
        txnum += 1

    addresses_state = dict()
    for address in addresses_set:
        addresses_state[address] = self.state.get_address(address)

    block_size = dummy_block.size
    block_size_limit = self.state.get_block_size_limit(last_block)

    txnum = 0
    while txnum < total_txn:
        tx = t_pool2[txnum]

        # Skip Transactions for later, which doesn't fit into block
        if block_size + tx.size + config.dev.tx_extra_overhead > block_size_limit:
            txnum += 1
            continue

        addr_from_pk_state = addresses_state[tx.txfrom]
        addr_from_pk = Transaction.get_slave(tx)
        if addr_from_pk:
            addr_from_pk_state = addresses_state[addr_from_pk]

        if tx.ots_key_reuse(addr_from_pk_state, tx.ots_key):
            del t_pool2[txnum]
            total_txn -= 1
            continue

        if tx.subtype == qrl_pb2.Transaction.TRANSFER:
            if addresses_state[tx.txfrom].balance < tx.amount + tx.fee:
                logger.warning('%s %s exceeds balance, invalid tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.txfrom].balance, tx.amount)
                del t_pool2[txnum]
                total_txn -= 1
                continue

        if tx.subtype == qrl_pb2.Transaction.MESSAGE:
            if addresses_state[tx.txfrom].balance < tx.fee:
                logger.warning('%s %s exceeds balance, invalid message tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Free %s',
                               addresses_state[tx.txfrom].balance, tx.fee)
                # BUGFIX: this branch previously decremented total_txn without
                # deleting the invalid tx, so the same tx was re-examined and
                # trailing transactions were silently dropped from the block.
                del t_pool2[txnum]
                total_txn -= 1
                continue

        if tx.subtype == qrl_pb2.Transaction.TOKEN:
            if addresses_state[tx.txfrom].balance < tx.fee:
                logger.warning('%s %s exceeds balance, invalid tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Fee %s',
                               addresses_state[tx.txfrom].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue

        if tx.subtype == qrl_pb2.Transaction.TRANSFERTOKEN:
            if addresses_state[tx.txfrom].balance < tx.fee:
                logger.warning('%s %s exceeds balance, invalid tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.txfrom].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue
            if bin2hstr(tx.token_txhash).encode() not in addresses_state[tx.txfrom].tokens:
                logger.warning('%s doesnt own any token with token_txnhash %s',
                               tx.txfrom, bin2hstr(tx.token_txhash).encode())
                del t_pool2[txnum]
                total_txn -= 1
                continue
            if addresses_state[tx.txfrom].tokens[bin2hstr(tx.token_txhash).encode()] < tx.amount:
                logger.warning('Token Transfer amount exceeds available token')
                logger.warning('Token Txhash %s', bin2hstr(tx.token_txhash).encode())
                logger.warning('Available Token Amount %s',
                               addresses_state[tx.txfrom].tokens[bin2hstr(tx.token_txhash).encode()])
                logger.warning('Transaction Amount %s', tx.amount)
                del t_pool2[txnum]
                total_txn -= 1
                continue

        if tx.subtype == qrl_pb2.Transaction.LATTICE:
            if addresses_state[tx.txfrom].balance < tx.fee:
                logger.warning('Lattice TXN %s %s exceeds balance, invalid tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.txfrom].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue

        if tx.subtype == qrl_pb2.Transaction.SLAVE:
            if addresses_state[tx.txfrom].balance < tx.fee:
                logger.warning('Slave TXN %s %s exceeds balance, invalid tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.txfrom].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue

        tx.apply_on_state(addresses_state)
        tx_pool.add_tx_to_pool(tx)
        tx._data.nonce = addresses_state[tx.txfrom].nonce
        txnum += 1
        block_size += tx.size + config.dev.tx_extra_overhead

    coinbase_nonce = self.state.get_address(signing_xmss.get_address()).nonce
    if signing_xmss.get_address() in addresses_state:
        coinbase_nonce = addresses_state[signing_xmss.get_address()].nonce + 1

    block = Block.create(mining_nonce=mining_nonce,
                         block_number=last_block.block_number + 1,
                         prevblock_headerhash=last_block.headerhash,
                         transactions=t_pool2,
                         signing_xmss=signing_xmss,
                         master_address=master_address,
                         nonce=coinbase_nonce)
    return block
def put_tx_metadata(self, txn, block_number, timestamp, batch):
    """Best-effort persist of [tx json, block number, timestamp] keyed by
    the hex-encoded tx hash; storage failures are deliberately ignored."""
    try:
        db_key = bin2hstr(txn.txhash)
        db_value = [txn.to_json(), block_number, timestamp]
        self._db.put(db_key, db_value, batch)
    except Exception:
        pass
def handle_message_received(source, message: qrllegacy_pb2.LegacyMessage):
    """
    Message Receipt
    This function accepts message receipt from peer,
    checks if the message hash already been received or not.
    In case its a already received message, it is ignored.
    Otherwise the request is made to get the full message.
    :param source: the peer connection the receipt arrived on
    :param message: LegacyMessage whose mrData field describes the receipt
    :return:
    """
    mr_data = message.mrData
    msg_hash = mr_data.hash

    # FIXME: Separate into respective message handlers

    if mr_data.type not in MessageReceipt.allowed_types:
        return

    # Transactions are only accepted once synced.
    if mr_data.type == qrllegacy_pb2.LegacyMessage.TX and source.factory.sync_state.state != ESyncState.synced:
        return

    if mr_data.type == qrllegacy_pb2.LegacyMessage.TX:
        # While mining is suspended, incoming txs are dropped too.
        if ntp.getTime() < source.factory.pow.suspend_mining_timestamp:
            return
        if source.factory._chain_manager.tx_pool.is_full_pending_transaction_pool():
            logger.warning('TX pool size full, incoming tx dropped. mr hash: %s',
                           bin2hstr(msg_hash))
            return

    if mr_data.type == qrllegacy_pb2.LegacyMessage.BK:
        # Only fetch blocks within a margin around our current chain height,
        # and only if we know their parent.
        if mr_data.block_number > source.factory.chain_height + config.dev.max_margin_block_number:
            logger.debug('Skipping block #%s as beyond lead limit', mr_data.block_number)
            return
        if mr_data.block_number < source.factory.chain_height - config.dev.min_margin_block_number:
            logger.debug('Skipping block #%s as beyond the limit', mr_data.block_number)
            return
        if not source.factory.is_block_present(mr_data.prev_headerhash):
            logger.debug('Skipping block #%s as prev_headerhash not found', mr_data.block_number)
            return

    if source.factory.master_mr.contains(msg_hash, mr_data.type):
        # Full message already received; nothing to request.
        return

    source.factory.master_mr.add_peer(msg_hash, mr_data.type, source, mr_data)

    if source.factory.master_mr.is_callLater_active(msg_hash):  # Ignore if already requested
        return

    source.factory.request_full_message(mr_data)
def remove_tx_metadata(self, txn, batch):
    """Best-effort removal of stored tx metadata; db errors are ignored."""
    try:
        db_key = bin2hstr(txn.txhash).encode()
        self._db.delete(db_key, batch)
    except Exception:
        pass
def state_update_genesis(self, chain, block, address_txn):
    """Apply the genesis block to state: process the coinbase tx, register
    genesis stake validators, derive the epoch seed and the ordered stake
    list, and initialize this node's hash chain.

    Returns True on success, False on an invalid coinbase/staker.
    (NOTE(review): one failure path returns bare None instead of False —
    confirm callers treat both as falsy.)
    """
    # Start Updating coin base txn
    tx = block.transactions[0]  # Expecting only 1 txn of COINBASE subtype in genesis block
    pubhash = tx.generate_pubhash(tx.PK, tx.ots_key)
    if tx.nonce != 1:
        logger.warning('nonce incorrect, invalid tx')
        logger.warning('subtype: %s', tx.subtype)
        logger.warning('%s actual: %s expected: %s', tx.txfrom, tx.nonce, address_txn[tx.txfrom][0] + 1)
        return False
    # TODO: To be fixed later
    if pubhash in address_txn[tx.txfrom][2]:
        logger.warning('pubkey reuse detected: invalid tx %s', tx.txhash)
        logger.warning('subtype: %s', tx.subtype)
        return False

    address_txn[tx.txto][1] += tx.amount
    address_txn[tx.txfrom][2].append(pubhash)
    # Coinbase update end here

    tmp_list = []
    for tx in block.transactions:
        if tx.subtype == TX_SUBTYPE_STAKE:
            # update txfrom, hash and stake_nonce against genesis for current or next stake_list
            tmp_list.append([tx.txfrom,
                             tx.hash,
                             0,
                             tx.first_hash,
                             GenesisBlock().get_info()[tx.txfrom],
                             tx.slave_public_key])
            if tx.txfrom == block.blockheader.stake_selector:
                if tx.txfrom in chain.m_blockchain[0].stake_list:
                    self.stake_validators_list.add_sv(tx.txfrom, tx.slave_public_key, tx.hash,
                                                      tx.first_hash, tx.balance)
                    self.stake_validators_list.sv_list[tx.txfrom].nonce += 1
                else:
                    logger.warning('designated staker not in genesis..')
                    return False
            else:
                if tx.txfrom in chain.m_blockchain[0].stake_list:
                    self.stake_validators_list.add_sv(tx.txfrom, tx.slave_public_key, tx.hash,
                                                      tx.first_hash, tx.balance)
                else:
                    self.stake_validators_list.add_next_sv(tx.txfrom, tx.slave_public_key, tx.hash,
                                                           tx.first_hash, tx.balance)

            pubhash = tx.generate_pubhash(tx.PK, tx.ots_key)
            address_txn[tx.txfrom][2].append(pubhash)

    epoch_seed = self.stake_validators_list.calc_seed()
    chain.block_chain_buffer.epoch_seed = epoch_seed
    self.put_epoch_seed(epoch_seed)

    # NOTE(review): epoch_seed is immediately overwritten with
    # chain.state.calc_seed(tmp_list) — confirm which seed source is intended.
    chain.block_chain_buffer.epoch_seed = chain.state.calc_seed(tmp_list)
    chain.stake_list = sorted(tmp_list,
                              key=lambda staker: chain.score(
                                  stake_address=staker[0],
                                  reveal_one=bin2hstr(sha256(reduce(
                                      lambda set1, set2: set1 + set2, staker[1]))),
                                  balance=staker[4],
                                  seed=chain.block_chain_buffer.epoch_seed))
    chain.block_chain_buffer.epoch_seed = format(chain.block_chain_buffer.epoch_seed, 'x')
    if chain.stake_list[0][0] != block.blockheader.stake_selector:
        logger.info('stake selector wrong..')
        return

    xmss = chain.wallet.address_bundle[0].xmss
    tmphc = hashchain(xmss.get_seed_private(), epoch=0)
    chain.hash_chain = tmphc.hashchain
    chain.wallet.save_wallet()
    return True
def decrease_txn_count(self, last_count: int, addr: bytes):
    """Decrement the stored transaction count for ``addr``.

    :param last_count: the currently stored count (must be > 0)
    :param addr: raw address whose counter is being decremented
    :raises ValueError: if last_count is 0 (the counter cannot go negative)
    """
    # FIXME: This should be transactional
    if last_count == 0:
        # BUGFIX: ValueError was called with logger-style %-args, which are
        # never interpolated (the message became a tuple). Format explicitly.
        raise ValueError('Cannot decrease transaction count last_count: %s, addr %s'
                         % (last_count, bin2hstr(addr)))
    self._db.put(b'txn_count_' + addr, last_count - 1)
def to_plain_address_amount(address_amount):
    """Convert an (address, amount) protobuf pair into the plain wallet
    representation with a hex-encoded address."""
    plain = qrlwallet_pb2.PlainAddressAmount()
    plain.address = bin2hstr(address_amount.address)
    plain.amount = address_amount.amount
    return plain
def put_block(self, block: Block, batch):
    """Store the block's JSON serialization keyed by its hex headerhash."""
    db_key = bin2hstr(block.headerhash).encode()
    self._db.put_raw(db_key, block.to_json().encode(), batch)
def test_wrap_message_works(self):
    """Wrapping a VE legacy message must yield a fixed, known byte layout."""
    ve_data = qrllegacy_pb2.VEData(version="version", genesis_prev_hash=b'genesis_hash')
    msg = qrllegacy_pb2.LegacyMessage(func_name=qrllegacy_pb2.LegacyMessage.VE, veData=ve_data)
    wrapped = P2PProtocol._wrap_message(msg)
    self.assertEqual('000000191a170a0776657273696f6e120c67656e657369735f68617368',
                     bin2hstr(wrapped))
def put_block_metadata(self, headerhash: bytes, block_metadata: BlockMetadata, batch):
    """Persist block metadata JSON under the 'metadata_' + hex headerhash key."""
    db_key = b'metadata_' + bin2hstr(headerhash).encode()
    self._db.put_raw(db_key, block_metadata.to_json(), batch)
def binvec2hstr(data):
    """Hex-encode every binary element of data, preserving order."""
    return list(map(bin2hstr, data))
def slave_tx_generate(ctx, src, master, number_of_slaves, access_type, fee, pk, otsidx):
    """ Generates Slave Transaction for the wallet """
    try:
        _, src_xmss = _select_wallet(ctx, src)
        if src_xmss:
            # BUGFIX: set_ots_index was previously called before checking
            # src_xmss, so a missing wallet raised (and was swallowed as a
            # generic argument error), making the plain-pk fallback below
            # unreachable. Only touch the wallet when one was selected.
            src_xmss.set_ots_index(otsidx)
            address_src_pk = src_xmss.pk
        else:
            address_src_pk = pk.encode()

        # Convert fee from QRL to shor (1 QRL = 1e9 shor).
        fee_shor = int(fee * 1.e9)
    except Exception as e:
        # Surface the underlying error instead of discarding it.
        click.echo("Error validating arguments: {}".format(str(e)))
        quit(1)

    slave_pks = []
    access_types = []
    slave_xmss_seed = []
    if number_of_slaves > 100:
        click.echo("Error: Max Limit for the number of slaves is 100")
        quit(1)

    for i in range(number_of_slaves):
        print("Generating Slave #" + str(i + 1))
        xmss = XMSS.from_height(config.dev.xmss_tree_height)
        # Seeds are persisted to slaves.json so the miner can recreate the trees.
        slave_xmss_seed.append(xmss.extended_seed)
        slave_pks.append(xmss.pk)
        access_types.append(access_type)
        print("Successfully Generated Slave %s/%s" % (str(i + 1), number_of_slaves))

    channel = grpc.insecure_channel(ctx.obj.node_public_address)
    stub = qrl_pb2_grpc.PublicAPIStub(channel)
    # FIXME: This could be problematic. Check
    slaveTxnReq = qrl_pb2.SlaveTxnReq(slave_pks=slave_pks,
                                      access_types=access_types,
                                      fee=fee_shor,
                                      xmss_pk=address_src_pk,
                                      master_addr=master.encode())

    try:
        slaveTxnResp = stub.GetSlaveTxn(slaveTxnReq, timeout=5)
        tx = Transaction.from_pbdata(slaveTxnResp.extended_transaction_unsigned.tx)
        tx.sign(src_xmss)
        with open('slaves.json', 'w') as f:
            json.dump([bin2hstr(src_xmss.address), slave_xmss_seed, tx.to_json()], f)
        click.echo('Successfully created slaves.json')
        click.echo('Move slaves.json file from current directory to the mining node inside ~/.qrl/')
    except grpc.RpcError as e:
        click.echo(e.details())
        quit(1)
    except Exception as e:
        click.echo("Unhandled error: {}".format(str(e)))
        quit(1)
def put_block_metadata(self, headerhash, block_metadata, batch):
    """Write the metadata JSON for a block to the raw store, keyed by
    'metadata_' plus the hex-encoded header hash."""
    metadata_key = b'metadata_' + bin2hstr(headerhash).encode()
    serialized = block_metadata.to_json()
    self._db.put_raw(metadata_key, serialized, batch)
def validate_all(self, state_container: StateContainer, check_nonce=True) -> bool:
    """Run the full validation pipeline for this transaction against the
    given state: banned-address checks (post hard-fork), custom/slave/
    extended validation, nonce ordering, and OTS key reuse.

    :param state_container: current chain state (addresses, bitfields, dev config)
    :param check_nonce: when True, enforce nonce == stored nonce + 1
    :return: True when the transaction is valid
    """
    # Banned-address enforcement applies from the third hard-fork height onward.
    if state_container.block_number >= state_container.current_dev_config.hard_fork_heights[2]:
        for banned_address in state_container.current_dev_config.banned_address:
            tx_type = self.pbdata.WhichOneof('transactionType')
            addr_from_pk = None
            if tx_type != 'coinbase':
                addr_from_pk = bytes(QRLHelper.getAddress(self.PK))
            if addr_from_pk == banned_address or self.master_addr == banned_address:
                logger.warning("Banned QRL Address found in master_addr or pk")
                return False
            if tx_type == 'coinbase':
                if self.pbdata.coinbase.addr_to == banned_address:
                    logger.warning("Banned QRL Address found in coinbase.addr_to")
                    return False
            elif tx_type == 'message':
                if self.pbdata.message.addr_to == banned_address:
                    logger.warning("Banned QRL Address found in message.addr_to")
                    return False
            elif tx_type == 'transfer':
                for addr_to in self.pbdata.transfer.addrs_to:
                    if banned_address == addr_to:
                        logger.warning("Banned QRL Address found in transfer.addr_to")
                        return False

    # Coinbase txs skip signature/nonce/OTS checks; only extended validation applies.
    if self.pbdata.WhichOneof('transactionType') == 'coinbase':
        if not self._validate_extended(state_container):
            return False
        return True

    if not self.validate(True):  # It also calls _validate_custom
        return False
    if not self.validate_slave(state_container):
        return False
    if not self._validate_extended(state_container):
        return False

    addr_from_pk = bytes(QRLHelper.getAddress(self.PK))
    addr_from_pk_state = state_container.addresses_state[addr_from_pk]
    expected_nonce = addr_from_pk_state.nonce + 1

    if check_nonce and self.nonce != expected_nonce:
        logger.warning('nonce incorrect, invalid tx')
        logger.warning('subtype: %s', self.type)
        logger.warning('%s actual: %s expected: %s',
                       OptimizedAddressState.bin_to_qaddress(addr_from_pk),
                       self.nonce, expected_nonce)
        return False

    # Reusing an OTS index would let an attacker forge further signatures.
    if state_container.paginated_bitfield.load_bitfield_and_ots_key_reuse(
            addr_from_pk_state.address, self.ots_key):
        logger.warning('pubkey reuse detected: invalid tx %s', bin2hstr(self.txhash))
        logger.warning('subtype: %s', self.type)
        return False

    return True