def test_sync_state_change_synced(self):
    chain_manager = Mock()
    chain_manager.height = 0
    chain_manager.get_block = MagicMock(return_value=GenesisBlock())
    chain_manager.last_block = GenesisBlock()
    chain_manager.tx_pool = Mock()
    chain_manager.tx_pool.transaction_pool = []

    get_block_metadata_response = Mock()
    get_block_metadata_response.block_difficulty = StringToUInt256('2')
    chain_manager.state.get_block_metadata = MagicMock(return_value=get_block_metadata_response)

    alice_xmss = get_alice_xmss()
    chain_manager.state.get_address = MagicMock(return_value=AddressState.get_default(alice_xmss.get_address()))
    chain_manager.state.get_measurement = MagicMock(return_value=60)

    p2p_factory = Mock()
    sync_state = Mock()
    time_provider = Mock()

    node = POW(chain_manager=chain_manager, p2p_factory=p2p_factory, sync_state=sync_state,
               time_provider=time_provider, slaves=get_random_master())

    self.assertIsNotNone(node)
    node.update_node_state(ESyncState.synced)
def test_sync_state_change_synced(self):
    chain_manager = Mock()
    chain_manager.height = 0
    chain_manager.get_block = MagicMock(return_value=GenesisBlock())
    chain_manager.last_block = GenesisBlock()
    chain_manager.tx_pool = Mock()
    chain_manager.tx_pool.transaction_pool = []

    get_block_metadata_response = Mock()
    get_block_metadata_response.block_difficulty = StringToUInt256('2')
    chain_manager.state.get_block_metadata = MagicMock(return_value=get_block_metadata_response)

    alice_xmss = get_alice_xmss()
    chain_manager.state.get_address = MagicMock(return_value=AddressState.get_default(alice_xmss.address))
    chain_manager.state.get_measurement = MagicMock(return_value=60)

    p2p_factory = Mock()
    sync_state = Mock()
    time_provider = Mock()

    node = POW(chain_manager=chain_manager, p2p_factory=p2p_factory, sync_state=sync_state,
               time_provider=time_provider, slaves=get_random_master(), mining_thread_count=0)

    self.assertIsNotNone(node)
    node.update_node_state(ESyncState.synced)
def test_sync_state_change_synced(self):
    chain_manager = Mock()
    chain_manager.height = 0
    chain_manager.get_block = MagicMock(return_value=GenesisBlock())
    chain_manager.last_block = GenesisBlock()
    chain_manager.tx_pool = Mock()
    chain_manager.tx_pool.transaction_pool = []
    chain_manager.tx_pool.transactions = chain_manager.tx_pool.transaction_pool

    get_block_metadata_response = Mock()
    get_block_metadata_response.block_difficulty = StringToUInt256('2')
    chain_manager.state.get_block_metadata = MagicMock(return_value=get_block_metadata_response)

    alice_xmss = get_alice_xmss()
    chain_manager.state.get_address = MagicMock(return_value=AddressState.get_default(alice_xmss.address))
    chain_manager.state.get_measurement = MagicMock(return_value=60)

    p2p_factory = Mock()
    sync_state = Mock()
    time_provider = Mock()

    # Mining is disabled here: once update_node_state is set to synced, the miner would start
    # and would not be shut down properly by this unit test.
    with set_mining_enabled(False):
        node = POW(chain_manager=chain_manager, p2p_factory=p2p_factory, sync_state=sync_state,
                   time_provider=time_provider, mining_credit_wallet=get_random_xmss().address,
                   mining_thread_count=0)

        self.assertIsNotNone(node)
        node.update_node_state(ESyncState.synced)
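The `set_mining_enabled(False)` guard in the latest revision reads like a context manager that temporarily flips a configuration flag. A minimal, self-contained sketch of that pattern; the names here are illustrative stand-ins, not the project's real helper:

from contextlib import contextmanager

class _FakeConfig:
    # stand-in for a module-level config object
    mining_enabled = True

fake_config = _FakeConfig()

@contextmanager
def set_mining_enabled(value: bool):
    # temporarily override the flag, restoring the previous value on exit
    previous = fake_config.mining_enabled
    fake_config.mining_enabled = value
    try:
        yield
    finally:
        fake_config.mining_enabled = previous

# usage: mining stays disabled only inside the block
with set_mining_enabled(False):
    assert fake_config.mining_enabled is False
assert fake_config.mining_enabled is True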
def start_pow(self, mining_thread_count):
    self._pow = POW(chain_manager=self._chain_manager, p2p_factory=self._p2pfactory,
                    sync_state=self._sync_state, time_provider=ntp,
                    mining_address=self.mining_address, mining_thread_count=mining_thread_count)
    self._pow.start()
def start_pow(self, mining_thread_count):
    # FIXME: This seems an unexpected side effect. It should be refactored
    self._pow = POW(chain_manager=self._chain_manager, p2p_factory=self._p2pfactory,
                    sync_state=self._sync_state, time_provider=ntp,
                    mining_credit_wallet=self.mining_credit_wallet,
                    mining_thread_count=mining_thread_count)
    self._pow.start()
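Across these revisions, start_pow simply injects the node's existing collaborators into a POW instance and starts it; the FIXME notes that constructing the miner here is a side effect. A small, self-contained sketch of the same wiring pattern, using stand-in classes rather than the real ones:

class FakePOW:
    # stand-in with the same constructor shape as the snippets above
    def __init__(self, chain_manager, p2p_factory, sync_state, time_provider,
                 mining_address, mining_thread_count):
        self.mining_address = mining_address
        self.mining_thread_count = mining_thread_count
        self.started = False

    def start(self):
        self.started = True

class FakeNode:
    def __init__(self, mining_address):
        self._chain_manager = object()
        self._p2pfactory = object()
        self._sync_state = object()
        self.mining_address = mining_address
        self._pow = None

    def start_pow(self, mining_thread_count):
        # same pattern: build the miner from collaborators the node already holds
        self._pow = FakePOW(chain_manager=self._chain_manager, p2p_factory=self._p2pfactory,
                            sync_state=self._sync_state, time_provider=None,
                            mining_address=self.mining_address,
                            mining_thread_count=mining_thread_count)
        self._pow.start()

node = FakeNode(mining_address=b'Q' * 39)
node.start_pow(mining_thread_count=0)
assert node._pow.started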
def test_sync_state_change_forked(self):
    chain_manager = Mock()
    p2p_factory = Mock()
    sync_state = Mock()
    time_provider = Mock()

    node = POW(chain_manager=chain_manager, p2p_factory=p2p_factory, sync_state=sync_state,
               time_provider=time_provider, slaves=get_slaves(0, 0))

    self.assertIsNotNone(node)
    node.update_node_state(ESyncState.forked)
def test_sync_state_change_syncing(self):
    chain_manager = Mock()
    p2p_factory = Mock()
    sync_state = Mock()
    time_provider = Mock()

    node = POW(chain_manager=chain_manager, p2p_factory=p2p_factory, sync_state=sync_state,
               time_provider=time_provider, slaves=get_slaves(0, 0), mining_thread_count=0)

    self.assertIsNotNone(node)
    node.update_node_state(ESyncState.syncing)
def test_sync_state_change_syncing(self):
    chain_manager = Mock()
    p2p_factory = Mock()
    sync_state = Mock()
    time_provider = Mock()

    node = POW(chain_manager=chain_manager, p2p_factory=p2p_factory, sync_state=sync_state,
               time_provider=time_provider, mining_credit_wallet=get_random_xmss().address,
               mining_thread_count=0)

    self.assertIsNotNone(node)
    node.update_node_state(ESyncState.syncing)
def start_pow(self):
    # FIXME: This seems an unexpected side effect. It should be refactored
    self._pow = POW(chain_manager=self._chain_manager, p2p_factory=self._p2pfactory,
                    sync_state=self._sync_state, time_provider=ntp, slaves=self.slaves)
    self._pow.start()
def test_create(self):
    chain_manager = Mock()
    p2p_factory = Mock()
    sync_state = Mock()
    time_provider = Mock()

    node = POW(chain_manager=chain_manager, p2p_factory=p2p_factory, sync_state=sync_state,
               time_provider=time_provider, mining_address=get_random_xmss().address,
               mining_thread_count=0)

    self.assertIsNotNone(node)
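The tests above repeat the same Mock scaffolding with small variations. A self-contained sketch of how that setup could be factored into a helper with unittest.mock; FakePOW below is a hypothetical stand-in, not the real class:

from unittest import TestCase, main
from unittest.mock import Mock, MagicMock

class FakePOW:
    # stand-in constructor mirroring the shape used in the tests above
    def __init__(self, chain_manager, p2p_factory, sync_state, time_provider):
        self.chain_manager = chain_manager

def make_mocked_chain_manager(height=0, difficulty=2):
    # common chain_manager fixture shared by the sync-state tests
    chain_manager = Mock()
    chain_manager.height = height
    chain_manager.tx_pool = Mock()
    chain_manager.tx_pool.transaction_pool = []
    metadata = Mock()
    metadata.block_difficulty = difficulty
    chain_manager.state.get_block_metadata = MagicMock(return_value=metadata)
    return chain_manager

class TestCreate(TestCase):
    def test_create(self):
        node = FakePOW(chain_manager=make_mocked_chain_manager(),
                       p2p_factory=Mock(), sync_state=Mock(), time_provider=Mock())
        self.assertIsNotNone(node)

if __name__ == '__main__':
    main()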
class QRLNode:
    def __init__(self, db_state: State, mining_address: bytes):
        self.start_time = time.time()
        self.db_state = db_state
        self._sync_state = SyncState()
        self.peer_manager = P2PPeerManager()
        self.peer_manager.load_peer_addresses()
        self.peer_manager.register(P2PPeerManager.EventType.NO_PEERS, self.connect_peers)
        self.p2pchain_manager = P2PChainManager()
        self.tx_manager = P2PTxManagement()
        self._chain_manager = None  # FIXME: REMOVE. This is temporary
        self._p2pfactory = None  # FIXME: REMOVE. This is temporary
        self._pow = None
        self.mining_address = mining_address
        banned_peers_filename = os.path.join(config.user.wallet_dir, config.dev.banned_peers_filename)
        self._banned_peers = ExpiringSet(expiration_time=config.user.ban_minutes * 60,
                                         filename=banned_peers_filename)
        reactor.callLater(10, self.monitor_chain_state)

    ####################################################

    @property
    def version(self):
        # FIXME: Move to __version__ coming from pip
        return config.dev.version

    @property
    def sync_state(self) -> SyncState:
        return self._sync_state

    @property
    def state(self):
        if self._p2pfactory is None:
            return ESyncState.unknown.value
        # FIXME
        return self._p2pfactory.sync_state.state.value

    @property
    def num_connections(self):
        if self._p2pfactory is None:
            return 0
        # FIXME
        return self._p2pfactory.connections

    @property
    def num_known_peers(self):
        # FIXME
        return len(self.peer_addresses)

    @property
    def uptime(self):
        return int(time.time() - self.start_time)

    @property
    def block_height(self):
        return self._chain_manager.height

    @property
    def epoch(self):
        if not self._chain_manager.get_last_block():
            return 0
        return self._chain_manager.get_last_block().block_number // config.dev.blocks_per_epoch

    @property
    def uptime_network(self):
        block_one = self._chain_manager.get_block_by_number(1)
        network_uptime = 0
        if block_one:
            network_uptime = int(time.time() - block_one.timestamp)
        return network_uptime

    @property
    def block_last_reward(self):
        if not self._chain_manager.get_last_block():
            return 0
        return self._chain_manager.get_last_block().block_reward

    @property
    def block_time_mean(self):
        block = self._chain_manager.get_last_block()
        prev_block_metadata = self._chain_manager.state.get_block_metadata(block.prev_headerhash)
        if prev_block_metadata is None:
            return config.dev.mining_setpoint_blocktime
        movavg = self._chain_manager.state.get_measurement(block.timestamp, block.prev_headerhash,
                                                           prev_block_metadata)
        return movavg

    @property
    def block_time_sd(self):
        # FIXME: Keep a moving var
        return 0

    @property
    def coin_supply(self):
        # FIXME: Keep a moving var
        return self.db_state.total_coin_supply()

    @property
    def coin_supply_max(self):
        # FIXME: Keep a moving var
        return config.dev.max_coin_supply

    @property
    def peer_addresses(self):
        return self.peer_manager._peer_addresses

    ####################################################

    def is_banned(self, addr_remote: str):
        return addr_remote in self._banned_peers

    def ban_peer(self, peer_obj):
        self._banned_peers.add(peer_obj.addr_remote)
        logger.warning('Banned %s', peer_obj.addr_remote)
        peer_obj.loseConnection()

    def connect_peers(self):
        logger.info('<<<Reconnecting to peer list: %s', self.peer_addresses)
        for peer_address in self.peer_addresses:
            if self.is_banned(peer_address):
                continue
            self._p2pfactory.connect_peer(peer_address)

    ####################################################

    def monitor_chain_state(self):
        self.peer_manager.monitor_chain_state()

        last_block = self._chain_manager.get_last_block()
        block_metadata = self.db_state.get_block_metadata(last_block.headerhash)
        node_chain_state = qrl_pb2.NodeChainState(block_number=last_block.block_number,
                                                  header_hash=last_block.headerhash,
                                                  cumulative_difficulty=bytes(block_metadata.cumulative_difficulty),
                                                  timestamp=int(time.time()))
        self.peer_manager.broadcast_chain_state(node_chain_state=node_chain_state)
        channel = self.peer_manager.get_better_difficulty(block_metadata.cumulative_difficulty)
        logger.debug('Got better difficulty %s', channel)
        if channel:
            logger.debug('Connection id >> %s', channel.addr_remote)
            channel.get_headerhash_list(self._chain_manager.height)
        reactor.callLater(config.user.chain_state_broadcast_period, self.monitor_chain_state)

    # FIXME: REMOVE. This is temporary
    def set_chain_manager(self, chain_manager: ChainManager):
        self._chain_manager = chain_manager

    ####################################################

    def start_pow(self, mining_thread_count):
        self._pow = POW(chain_manager=self._chain_manager, p2p_factory=self._p2pfactory,
                        sync_state=self._sync_state, time_provider=ntp,
                        mining_address=self.mining_address, mining_thread_count=mining_thread_count)
        self._pow.start()

    def start_listening(self):
        self._p2pfactory = P2PFactory(chain_manager=self._chain_manager,
                                      sync_state=self.sync_state,
                                      qrl_node=self)  # FIXME: Try to avoid cyclic references
        self._p2pfactory.start_listening()

    ####################################################

    @staticmethod
    def validate_amount(amount_str: str) -> bool:
        # FIXME: Refactored code. Review Decimal usage all over the code
        Decimal(amount_str)
        return True

    ####################################################

    @staticmethod
    def create_token_txn(symbol: bytes, name: bytes, owner: bytes, decimals: int, initial_balances,
                         fee: int, xmss_pk: bytes, master_addr: bytes):
        return TokenTransaction.create(symbol, name, owner, decimals, initial_balances,
                                       fee, xmss_pk, master_addr)

    @staticmethod
    def create_transfer_token_txn(addrs_to: list, token_txhash: bytes, amounts: list, fee: int,
                                  xmss_pk: bytes, master_addr: bytes):
        return TransferTokenTransaction.create(token_txhash, addrs_to, amounts, fee, xmss_pk, master_addr)

    def create_send_tx(self, addrs_to: list, amounts: list, fee: int, xmss_pk: bytes,
                       master_addr: bytes) -> TransferTransaction:
        addr_from = self.get_addr_from(xmss_pk, master_addr)
        balance = self.db_state.balance(addr_from)
        if sum(amounts) + fee > balance:
            raise ValueError("Not enough funds in the source address")

        return TransferTransaction.create(addrs_to=addrs_to, amounts=amounts, fee=fee,
                                          xmss_pk=xmss_pk, master_addr=master_addr)

    def create_slave_tx(self, slave_pks: list, access_types: list, fee: int, xmss_pk: bytes,
                        master_addr: bytes) -> SlaveTransaction:
        return SlaveTransaction.create(slave_pks=slave_pks, access_types=access_types, fee=fee,
                                       xmss_pk=xmss_pk, master_addr=master_addr)

    def create_lattice_public_key_txn(self, kyber_pk: bytes, dilithium_pk: bytes, fee: int,
                                      xmss_pk: bytes, master_addr: bytes) -> SlaveTransaction:
        return LatticePublicKey.create(kyber_pk=kyber_pk, dilithium_pk=dilithium_pk, fee=fee,
                                       xmss_pk=xmss_pk, master_addr=master_addr)

    # FIXME: Rename this appropriately
    def submit_send_tx(self, tx) -> bool:
        if tx is None:
            raise ValueError("The transaction was empty")

        if self._chain_manager.tx_pool.is_full_pending_transaction_pool():
            raise ValueError("Pending Transaction Pool is full")

        return self._p2pfactory.add_unprocessed_txn(tx, ip=None)  # TODO (cyyber): Replace None with the IP that made the API request

    @staticmethod
    def get_addr_from(xmss_pk, master_addr):
        if master_addr:
            return master_addr
        return bytes(QRLHelper.getAddress(xmss_pk))

    def get_address_is_used(self, address: bytes) -> bool:
        if not AddressState.address_is_valid(address):
            raise ValueError("Invalid Address")
        return self.db_state.address_used(address)

    def get_address_state(self, address: bytes) -> qrl_pb2.AddressState:
        if not AddressState.address_is_valid(address):
            raise ValueError("Invalid Address")
        address_state = self.db_state.get_address_state(address)
        return address_state

    def get_transaction(self, query_hash: bytes):
        """
        This method returns an object that matches the query hash
        """
        # FIXME: At some point, all objects in DB will be indexed by a hash
        # TODO: Search tx hash
        # FIXME: We don't need searches, etc.. getting a protobuf indexed by hash from DB should be enough
        # FIXME: This is just a workaround to provide functionality
        result = self._chain_manager.get_transaction(query_hash)
        if result:
            return result[0], result[1]
        return None, None

    def get_block_last(self) -> Optional[Block]:
        """
        This method returns an object that matches the query hash
        """
        return self._chain_manager.get_last_block()

    def get_block_from_hash(self, query_hash: bytes) -> Optional[Block]:
        """
        This method returns an object that matches the query hash
        """
        return self.db_state.get_block(query_hash)

    def get_block_from_index(self, index: int) -> Block:
        """
        This method returns an object that matches the query hash
        """
        return self.db_state.get_block_by_number(index)

    def get_blockidx_from_txhash(self, transaction_hash):
        result = self.db_state.get_tx_metadata(transaction_hash)
        if result:
            return result[1]
        return None

    def get_token_detailed_list(self):
        pbdata = self.db_state.get_token_list()
        token_list = TokenList.from_json(pbdata)

        token_detailed_list = qrl_pb2.TokenDetailedList()
        for token_txhash in token_list.token_txhash:
            token_txn, _ = self.db_state.get_tx_metadata(token_txhash)
            transaction_extended = qrl_pb2.TransactionExtended(tx=token_txn.pbdata,
                                                               addr_from=token_txhash.addr_from)
            token_detailed_list.extended_tokens.extend([transaction_extended])
        return token_detailed_list

    def get_latest_blocks(self, offset, count) -> List[Block]:
        answer = []
        end = self.block_height - offset
        start = max(0, end - count - offset)
        for blk_idx in range(start, end + 1):
            answer.append(self._chain_manager.get_block_by_number(blk_idx))
        return answer

    def get_latest_transactions(self, offset, count):
        answer = []
        skipped = 0
        for tx in self.db_state.get_last_txs():
            if skipped >= offset:
                answer.append(tx)
                if len(answer) >= count:
                    break
            else:
                skipped += 1
        return answer

    def get_latest_transactions_unconfirmed(self, offset, count):
        answer = []
        skipped = 0
        for tx_set in self._chain_manager.tx_pool.transactions:
            if skipped >= offset:
                answer.append(tx_set[1].transaction)
                if len(answer) >= count:
                    break
            else:
                skipped += 1
        return answer

    def getNodeInfo(self) -> qrl_pb2.NodeInfo:
        info = qrl_pb2.NodeInfo()
        info.version = self.version
        info.state = self.state
        info.num_connections = self.num_connections
        info.num_known_peers = self.num_known_peers
        info.uptime = self.uptime
        info.block_height = self.block_height
        info.block_last_hash = self._chain_manager.get_last_block().headerhash  # FIXME
        info.network_id = config.dev.genesis_prev_headerhash  # FIXME
        return info

    def get_block_timeseries(self, block_count) -> Iterator[qrl_pb2.BlockDataPoint]:
        result = []

        if self._chain_manager.height == 0:
            return result

        block = self._chain_manager.get_last_block()
        if block is None:
            return result

        headerhash_current = block.headerhash
        while len(result) < block_count:
            data_point = self._chain_manager.state.get_block_datapoint(headerhash_current)
            if data_point is None:
                break
            result.append(data_point)
            headerhash_current = data_point.header_hash_prev

        return reversed(result)

    ####################################################

    def broadcast_ephemeral_message(self, encrypted_ephemeral: EncryptedEphemeralMessage) -> bool:
        if not encrypted_ephemeral.validate():
            return False

        self._p2pfactory.broadcast_ephemeral_message(encrypted_ephemeral)
        return True

    def collect_ephemeral_message(self, msg_id):
        return self.db_state.get_ephemeral_metadata(msg_id)

    ####################################################

    def get_blockheader_and_metadata(self, block_number) -> list:
        if block_number == 0:
            block_number = self.block_height

        result = []
        block = self.get_block_from_index(block_number)
        if block:
            blockheader = block.blockheader
            blockmetadata = self.db_state.get_block_metadata(blockheader.headerhash)
            result = [blockheader, blockmetadata]

        return result

    def get_block_to_mine(self, wallet_address) -> list:
        last_block = self._chain_manager.get_last_block()
        last_block_metadata = self._chain_manager.state.get_block_metadata(last_block.headerhash)
        return self._pow.miner.get_block_to_mine(wallet_address, self._chain_manager.tx_pool,
                                                 last_block, last_block_metadata.block_difficulty)

    def submit_mined_block(self, blob) -> bool:
        return self._pow.miner.submit_mined_block(blob)
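get_latest_transactions and get_latest_transactions_unconfirmed in the class above share a skip-offset-then-take-count loop. The same logic in isolation, as a hedged sketch over a plain list instead of the tx pool:

def take_page(items, offset, count):
    # skip `offset` items, then collect at most `count` of the rest,
    # mirroring the loop used by get_latest_transactions above
    answer = []
    skipped = 0
    for item in items:
        if skipped >= offset:
            answer.append(item)
            if len(answer) >= count:
                break
        else:
            skipped += 1
    return answer

assert take_page(list(range(10)), offset=3, count=4) == [3, 4, 5, 6]
assert take_page(list(range(10)), offset=8, count=5) == [8, 9]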
class QRLNode:
    def __init__(self, mining_address: bytes):
        self.start_time = ntp.getTime()
        self._sync_state = SyncState()
        self.peer_manager = P2PPeerManager()
        self.peer_manager.load_peer_addresses()
        self.peer_manager.register(P2PPeerManager.EventType.NO_PEERS, self.connect_peers)
        self.p2pchain_manager = P2PChainManager()
        self.tx_manager = P2PTxManagement()
        self._chain_manager = None  # FIXME: REMOVE. This is temporary
        self._p2pfactory = None  # FIXME: REMOVE. This is temporary
        self._pow = None
        self.mining_address = mining_address
        reactor.callLater(10, self.monitor_chain_state)

    ####################################################

    @property
    def version(self):
        return config.dev.version

    @property
    def sync_state(self) -> SyncState:
        return self._sync_state

    @property
    def state(self):
        if self._p2pfactory is None:
            return ESyncState.unknown.value
        # FIXME
        return self._p2pfactory.sync_state.state.value

    @property
    def num_connections(self):
        if self._p2pfactory is None:
            return 0
        return self._p2pfactory.num_connections

    @property
    def num_known_peers(self):
        return len(self.peer_manager.known_peer_addresses)

    @property
    def uptime(self):
        return ntp.getTime() - self.start_time

    @property
    def block_height(self):
        return self._chain_manager.height

    @property
    def epoch(self):
        if not self._chain_manager.last_block:
            return 0
        return self._chain_manager.last_block.block_number // config.dev.blocks_per_epoch

    @property
    def uptime_network(self):
        block_one = self._chain_manager.get_block_by_number(1)
        network_uptime = 0
        if block_one:
            network_uptime = ntp.getTime() - block_one.timestamp
        return network_uptime

    @property
    def block_last_reward(self):
        if not self._chain_manager.last_block:
            return 0
        return self._chain_manager.last_block.block_reward

    @property
    def block_time_mean(self):
        block = self._chain_manager.last_block
        prev_block_metadata = self._chain_manager.get_block_metadata(block.prev_headerhash)
        if prev_block_metadata is None:
            return config.dev.mining_setpoint_blocktime
        movavg = self._chain_manager.get_measurement(block.timestamp, block.prev_headerhash,
                                                     prev_block_metadata)
        return movavg

    @property
    def block_time_sd(self):
        # FIXME: Keep a moving var
        return 0

    @property
    def coin_supply(self):
        # FIXME: Keep a moving var
        return self._chain_manager.total_coin_supply

    @property
    def coin_supply_max(self):
        # FIXME: Keep a moving var
        return config.dev.max_coin_supply

    ####################################################

    def get_peers_stat(self) -> list:
        return self.peer_manager.get_peers_stat()

    def connect_peers(self):
        self.peer_manager.connect_peers()

    ####################################################

    def monitor_chain_state(self):
        self.peer_manager.monitor_chain_state()

        last_block = self._chain_manager.last_block
        block_metadata = self._chain_manager.get_block_metadata(last_block.headerhash)
        node_chain_state = qrl_pb2.NodeChainState(block_number=last_block.block_number,
                                                  header_hash=last_block.headerhash,
                                                  cumulative_difficulty=bytes(block_metadata.cumulative_difficulty),
                                                  version=config.dev.version,
                                                  timestamp=ntp.getTime())
        self.peer_manager.broadcast_chain_state(node_chain_state=node_chain_state)
        channel = self.peer_manager.get_better_difficulty(block_metadata.cumulative_difficulty)
        logger.debug('Got better difficulty %s', channel)
        if channel:
            logger.debug('Connection id >> %s', channel.peer)
            channel.send_get_headerhash_list(self._chain_manager.height)
        reactor.callLater(config.user.chain_state_broadcast_period, self.monitor_chain_state)

    # FIXME: REMOVE. This is temporary
    def set_chain_manager(self, chain_manager: ChainManager):
        self._chain_manager = chain_manager

    ####################################################

    def start_pow(self, mining_thread_count):
        self._pow = POW(chain_manager=self._chain_manager, p2p_factory=self._p2pfactory,
                        sync_state=self._sync_state, time_provider=ntp,
                        mining_address=self.mining_address, mining_thread_count=mining_thread_count)
        self._pow.start()

    def start_listening(self):
        self._p2pfactory = P2PFactory(chain_manager=self._chain_manager,
                                      sync_state=self.sync_state,
                                      qrl_node=self)  # FIXME: Try to avoid cyclic references
        self.peer_manager._p2pfactory = self._p2pfactory
        self._p2pfactory.start_listening()

    ####################################################

    @staticmethod
    def validate_amount(amount_str: str) -> bool:
        # FIXME: Refactored code. Review Decimal usage all over the code
        Decimal(amount_str)
        return True

    ####################################################

    @staticmethod
    def create_message_txn(message_hash: bytes, fee: int, xmss_pk: bytes, master_addr: bytes):
        return MessageTransaction.create(message_hash=message_hash, fee=fee,
                                         xmss_pk=xmss_pk, master_addr=master_addr)

    @staticmethod
    def create_token_txn(symbol: bytes, name: bytes, owner: bytes, decimals: int, initial_balances,
                         fee: int, xmss_pk: bytes, master_addr: bytes):
        return TokenTransaction.create(symbol, name, owner, decimals, initial_balances,
                                       fee, xmss_pk, master_addr)

    @staticmethod
    def create_transfer_token_txn(addrs_to: list, token_txhash: bytes, amounts: list, fee: int,
                                  xmss_pk: bytes, master_addr: bytes):
        return TransferTokenTransaction.create(token_txhash, addrs_to, amounts, fee, xmss_pk, master_addr)

    def create_send_tx(self, addrs_to: list, amounts: list, fee: int, xmss_pk: bytes,
                       master_addr: bytes) -> TransferTransaction:
        addr_from = self.get_addr_from(xmss_pk, master_addr)
        balance = self._chain_manager.get_address_balance(addr_from)
        if sum(amounts) + fee > balance:
            raise ValueError("Not enough funds in the source address")

        return TransferTransaction.create(addrs_to=addrs_to, amounts=amounts, fee=fee,
                                          xmss_pk=xmss_pk, master_addr=master_addr)

    def create_slave_tx(self, slave_pks: list, access_types: list, fee: int, xmss_pk: bytes,
                        master_addr: bytes) -> SlaveTransaction:
        return SlaveTransaction.create(slave_pks=slave_pks, access_types=access_types, fee=fee,
                                       xmss_pk=xmss_pk, master_addr=master_addr)

    # FIXME: Rename this appropriately
    def submit_send_tx(self, tx) -> bool:
        if tx is None:
            raise ValueError("The transaction was empty")

        if self._chain_manager.tx_pool.is_full_pending_transaction_pool():
            raise ValueError("Pending Transaction Pool is full")

        return self._p2pfactory.add_unprocessed_txn(tx, ip=None)  # TODO (cyyber): Replace None with the IP that made the API request

    @staticmethod
    def get_addr_from(xmss_pk, master_addr):
        if master_addr:
            return master_addr
        return bytes(QRLHelper.getAddress(xmss_pk))

    def get_address_is_used(self, address: bytes) -> bool:
        if not AddressState.address_is_valid(address):
            raise ValueError("Invalid Address")
        return self._chain_manager.get_address_is_used(address)

    def get_address_state(self, address: bytes) -> AddressState:
        if address != config.dev.coinbase_address and not AddressState.address_is_valid(address):
            raise ValueError("Invalid Address")
        address_state = self._chain_manager.get_address_state(address)
        return address_state

    def get_all_address_state(self) -> list:
        return self._chain_manager.get_all_address_state()

    def get_transaction(self, query_hash: bytes):
        """
        This method returns an object that matches the query hash
        """
        # FIXME: At some point, all objects in DB will be indexed by a hash
        # TODO: Search tx hash
        # FIXME: We don't need searches, etc.. getting a protobuf indexed by hash from DB should be enough
        # FIXME: This is just a workaround to provide functionality
        result = self._chain_manager.get_tx_metadata(query_hash)
        return result

    def get_unconfirmed_transaction(self, query_hash: bytes):
        result = self._chain_manager.get_unconfirmed_transaction(query_hash)
        return result

    def get_block_last(self) -> Optional[Block]:
        """
        This method returns an object that matches the query hash
        """
        return self._chain_manager.last_block

    def get_block_from_hash(self, query_hash: bytes) -> Optional[Block]:
        """
        This method returns an object that matches the query hash
        """
        return self._chain_manager.get_block(query_hash)

    def get_block_from_index(self, index: int) -> Block:
        """
        This method returns an object that matches the query hash
        """
        return self._chain_manager.get_block_by_number(index)

    def get_blockidx_from_txhash(self, transaction_hash):
        result = self._chain_manager.get_tx_metadata(transaction_hash)
        if result:
            return result[1]
        return None

    def get_latest_blocks(self, offset, count) -> List[Block]:
        answer = []
        end = self.block_height - offset
        start = max(0, end - count + 1)
        for blk_idx in range(start, end + 1):
            answer.append(self._chain_manager.get_block_by_number(blk_idx))
        return answer

    def get_latest_transactions(self, offset, count):
        answer = []
        skipped = 0
        for tx in self._chain_manager.get_last_transactions():
            if skipped >= offset:
                answer.append(tx)
                if len(answer) >= count:
                    break
            else:
                skipped += 1
        return answer

    def get_latest_transactions_unconfirmed(self, offset, count):
        answer = []
        skipped = 0
        for tx_set in self._chain_manager.tx_pool.transactions:
            if skipped >= offset:
                answer.append(tx_set[1])
                if len(answer) >= count:
                    break
            else:
                skipped += 1
        return answer

    def get_node_info(self) -> qrl_pb2.NodeInfo:
        info = qrl_pb2.NodeInfo()
        info.version = self.version
        info.state = self.state
        info.num_connections = self.num_connections
        info.num_known_peers = self.num_known_peers
        info.uptime = self.uptime
        info.block_height = self.block_height
        info.block_last_hash = self._chain_manager.last_block.headerhash
        info.network_id = config.user.genesis_prev_headerhash
        return info

    def get_block_timeseries(self, block_count) -> Iterator[qrl_pb2.BlockDataPoint]:
        result = []

        if self.block_height <= 0:
            return result

        block = self._chain_manager.last_block
        if block is None:
            return result

        headerhash_current = block.headerhash
        while len(result) < block_count:
            data_point = self._chain_manager.get_block_datapoint(headerhash_current)
            if data_point is None:
                break
            result.append(data_point)
            headerhash_current = data_point.header_hash_prev

        return reversed(result)

    def get_blockheader_and_metadata(self, block_number=0) -> Tuple:
        return self._chain_manager.get_blockheader_and_metadata(block_number)

    def get_block_to_mine(self, wallet_address) -> list:
        return self._chain_manager.get_block_to_mine(self._pow.miner, wallet_address)

    def submit_mined_block(self, blob) -> bool:
        return self._pow.miner.submit_mined_block(blob)
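get_block_timeseries walks backwards from the last block by following header_hash_prev links and then reverses the collected datapoints so they come out oldest-first. The traversal in isolation, sketched over a toy dict-based chain:

def block_timeseries(datapoints, tip_hash, block_count):
    # datapoints: hash -> (value, previous_hash); walk back from the tip,
    # stop after block_count items or when the parent is unknown, then reverse
    result = []
    current = tip_hash
    while len(result) < block_count:
        entry = datapoints.get(current)
        if entry is None:
            break
        result.append(entry[0])
        current = entry[1]
    return list(reversed(result))

chain = {'c': (3, 'b'), 'b': (2, 'a'), 'a': (1, None)}
assert block_timeseries(chain, 'c', 2) == [2, 3]
assert block_timeseries(chain, 'c', 10) == [1, 2, 3]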
class QRLNode:
    def __init__(self, mining_address: bytes):
        self.start_time = ntp.getTime()
        self._sync_state = SyncState()
        self.peer_manager = P2PPeerManager()
        self.peer_manager.load_peer_addresses()
        self.p2pchain_manager = P2PChainManager()
        self.tx_manager = P2PTxManagement()
        self._chain_manager = None  # FIXME: REMOVE. This is temporary
        self._p2pfactory = None  # FIXME: REMOVE. This is temporary
        self._pow = None
        self.mining_address = mining_address
        reactor.callLater(10, self.monitor_chain_state)

    ####################################################

    @property
    def version(self):
        return config.dev.version

    @property
    def sync_state(self) -> SyncState:
        return self._sync_state

    @property
    def state(self):
        if self._p2pfactory is None:
            return ESyncState.unknown.value
        # FIXME
        return self._p2pfactory.sync_state.state.value

    @property
    def num_connections(self):
        if self._p2pfactory is None:
            return 0
        return self._p2pfactory.num_connections

    @property
    def num_known_peers(self):
        return len(self.peer_manager.known_peer_addresses)

    @property
    def uptime(self):
        return ntp.getTime() - self.start_time

    @property
    def block_height(self):
        return self._chain_manager.height

    @property
    def epoch(self):
        if not self._chain_manager.last_block:
            return 0
        return self._chain_manager.last_block.block_number // config.dev.blocks_per_epoch

    @property
    def uptime_network(self):
        block_one = self._chain_manager.get_block_by_number(1)
        network_uptime = 0
        if block_one:
            network_uptime = ntp.getTime() - block_one.timestamp
        return network_uptime

    @property
    def block_last_reward(self):
        if not self._chain_manager.last_block:
            return 0
        return self._chain_manager.last_block.block_reward

    @property
    def block_time_mean(self):
        block = self._chain_manager.last_block
        prev_block_metadata = self._chain_manager.get_block_metadata(block.prev_headerhash)
        if prev_block_metadata is None:
            return config.dev.block_timing_in_seconds
        movavg = self._chain_manager.get_measurement(config.dev, block.timestamp,
                                                     block.prev_headerhash, prev_block_metadata)
        return movavg

    @property
    def block_time_sd(self):
        # FIXME: Keep a moving var
        return 0

    @property
    def coin_supply(self):
        # FIXME: Keep a moving var
        return self._chain_manager.total_coin_supply

    @property
    def coin_supply_max(self):
        # FIXME: Keep a moving var
        return config.dev.max_coin_supply

    ####################################################

    def get_peers_stat(self) -> list:
        return self.peer_manager.get_peers_stat()

    ####################################################

    def monitor_chain_state(self):
        self.peer_manager.monitor_chain_state()

        last_block = self._chain_manager.last_block
        block_metadata = self._chain_manager.get_block_metadata(last_block.headerhash)
        node_chain_state = qrl_pb2.NodeChainState(block_number=last_block.block_number,
                                                  header_hash=last_block.headerhash,
                                                  cumulative_difficulty=bytes(block_metadata.cumulative_difficulty),
                                                  version=config.dev.version,
                                                  timestamp=ntp.getTime())
        self.peer_manager.broadcast_chain_state(node_chain_state=node_chain_state)
        channel = self.peer_manager.get_better_difficulty(block_metadata.cumulative_difficulty)
        logger.debug('Got better difficulty %s', channel)
        if channel:
            logger.debug('Connection id >> %s', channel.peer)
            channel.send_get_headerhash_list(self._chain_manager.height)
        reactor.callLater(config.user.chain_state_broadcast_period, self.monitor_chain_state)

    # FIXME: REMOVE. This is temporary
    def set_chain_manager(self, chain_manager: ChainManager):
        self._chain_manager = chain_manager

    ####################################################

    def start_pow(self, mining_thread_count):
        self._pow = POW(chain_manager=self._chain_manager, p2p_factory=self._p2pfactory,
                        sync_state=self._sync_state, time_provider=ntp,
                        mining_address=self.mining_address, mining_thread_count=mining_thread_count)
        self._pow.start()

    def start_listening(self):
        self._p2pfactory = P2PFactory(chain_manager=self._chain_manager,
                                      sync_state=self.sync_state,
                                      qrl_node=self)  # FIXME: Try to avoid cyclic references
        self.peer_manager.set_p2p_factory(self._p2pfactory)
        self._p2pfactory.start_listening()

    ####################################################

    @staticmethod
    def validate_amount(amount_str: str) -> bool:
        # FIXME: Refactored code. Review Decimal usage all over the code
        Decimal(amount_str)
        return True

    ####################################################

    @staticmethod
    def create_multi_sig_txn(signatories: list, weights: list, threshold: int, fee: int,
                             xmss_pk: bytes, master_addr: bytes):
        return MultiSigCreate.create(signatories=signatories, weights=weights, threshold=threshold,
                                     fee=fee, xmss_pk=xmss_pk, master_addr=master_addr)

    @staticmethod
    def create_multi_sig_spend_txn(multi_sig_address: bytes, addrs_to: list, amounts: list,
                                   expiry_block_number: int, fee: int, xmss_pk: bytes, master_addr: bytes):
        return MultiSigSpend.create(multi_sig_address=multi_sig_address, addrs_to=addrs_to,
                                    amounts=amounts, expiry_block_number=expiry_block_number,
                                    fee=fee, xmss_pk=xmss_pk, master_addr=master_addr)

    @staticmethod
    def create_multi_sig_vote_txn(shared_key: bytes, unvote: bool, fee: int,
                                  xmss_pk: bytes, master_addr: bytes):
        return MultiSigVote.create(shared_key=shared_key, unvote=unvote, fee=fee,
                                   xmss_pk=xmss_pk, master_addr=master_addr)

    @staticmethod
    def create_message_txn(message_hash: bytes, addr_to: bytes, fee: int,
                           xmss_pk: bytes, master_addr: bytes):
        return MessageTransaction.create(message_hash=message_hash, addr_to=addr_to, fee=fee,
                                         xmss_pk=xmss_pk, master_addr=master_addr)

    @staticmethod
    def create_token_txn(symbol: bytes, name: bytes, owner: bytes, decimals: int, initial_balances,
                         fee: int, xmss_pk: bytes, master_addr: bytes):
        return TokenTransaction.create(symbol, name, owner, decimals, initial_balances,
                                       fee, xmss_pk, master_addr)

    @staticmethod
    def create_transfer_token_txn(addrs_to: list, token_txhash: bytes, amounts: list, fee: int,
                                  xmss_pk: bytes, master_addr: bytes):
        return TransferTokenTransaction.create(token_txhash, addrs_to, amounts, fee, xmss_pk, master_addr)

    def create_send_tx(self, addrs_to: list, amounts: list, message_data: bytes, fee: int,
                       xmss_pk: bytes, master_addr: bytes) -> TransferTransaction:
        addr_from = self.get_addr_from(xmss_pk, master_addr)
        balance = self._chain_manager.get_address_balance(addr_from)
        if sum(amounts) + fee > balance:
            raise ValueError("Not enough funds in the source address")

        return TransferTransaction.create(addrs_to=addrs_to, amounts=amounts, message_data=message_data,
                                          fee=fee, xmss_pk=xmss_pk, master_addr=master_addr)

    @staticmethod
    def create_slave_tx(slave_pks: list, access_types: list, fee: int, xmss_pk: bytes,
                        master_addr: bytes) -> SlaveTransaction:
        return SlaveTransaction.create(slave_pks=slave_pks, access_types=access_types, fee=fee,
                                       xmss_pk=xmss_pk, master_addr=master_addr)

    @staticmethod
    def create_lattice_tx(pk1: bytes, pk2: bytes, pk3: bytes, fee: int, xmss_pk: bytes,
                          master_addr: bytes) -> LatticeTransaction:
        return LatticeTransaction.create(pk1=pk1, pk2=pk2, pk3=pk3, fee=fee,
                                         xmss_pk=xmss_pk, master_addr=master_addr)

    # FIXME: Rename this appropriately
    def submit_send_tx(self, tx) -> bool:
        if tx is None:
            raise ValueError("The transaction was empty")

        if self._chain_manager.tx_pool.is_full_pending_transaction_pool():
            raise ValueError("Pending Transaction Pool is full")

        return self._p2pfactory.add_unprocessed_txn(tx, ip=None)  # TODO (cyyber): Replace None with the IP that made the API request

    @staticmethod
    def get_addr_from(xmss_pk, master_addr):
        if master_addr:
            return master_addr
        return bytes(QRLHelper.getAddress(xmss_pk))

    def get_address_is_used(self, address: bytes) -> bool:
        if not OptimizedAddressState.address_is_valid(address):
            raise ValueError("Invalid Address")
        return self._chain_manager.get_address_is_used(address)

    def get_address_state(self, address: bytes,
                          exclude_ots_bitfield: bool = False,
                          exclude_transaction_hashes: bool = False) -> AddressState:
        if address != config.dev.coinbase_address and not AddressState.address_is_valid(address):
            raise ValueError("Invalid Address")
        address_state = self._chain_manager.get_address_state(address, exclude_ots_bitfield,
                                                              exclude_transaction_hashes)
        return address_state

    def get_optimized_address_state(self, address: bytes) -> OptimizedAddressState:
        if address != config.dev.coinbase_address and not OptimizedAddressState.address_is_valid(address):
            raise ValueError("Invalid Address")
        address_state = self._chain_manager.get_optimized_address_state(address)
        return address_state

    def get_multi_sig_address_state(self, address: bytes) -> MultiSigAddressState:
        if not MultiSigAddressState.address_is_valid(address):
            raise ValueError("Invalid Address")
        multi_sig_address_state = self._chain_manager.get_multi_sig_address_state(address)
        return multi_sig_address_state

    def get_ots(self, address: bytes, page_from: int, page_count: int,
                unused_ots_index_from: int) -> (list, Optional[int], bool):
        if not OptimizedAddressState.address_is_valid(address):
            return None, None, None

        max_bitfield = 2 ** OptimizedAddressState.get_height_from_address(address)
        max_pages = (max_bitfield // config.dev.ots_tracking_per_page) + 1
        page_from = min(page_from, max_pages)
        max_pages = min(page_from + page_count - 1, max_pages)

        bitfields = list()
        for page in range(page_from, max_pages + 1):
            bitfield = self._chain_manager.get_bitfield(address, page)
            bitfields.append(qrl_pb2.OTSBitfieldByPage(ots_bitfield=bitfield, page_number=page))

        unused_ots_index = self._chain_manager.get_unused_ots_index2(address, unused_ots_index_from)
        unused_ots_index_found = unused_ots_index is not None

        return bitfields, unused_ots_index, unused_ots_index_found

    def is_slave(self, master_address: bytes, slave_pk: bytes):
        return self._chain_manager.is_slave(master_address, slave_pk)

    def get_all_address_state(self) -> list:
        return self._chain_manager.get_all_address_state()

    def _load_transaction_hashes(self, address: bytes, item_per_page: int, page_number: int) -> list:
        address_state = self._chain_manager.get_optimized_address_state(address)
        start_item_index = max(0, address_state.transaction_hash_count() - item_per_page * page_number)
        end_item_index = min(address_state.transaction_hash_count(), start_item_index + item_per_page)

        transaction_hashes = self._chain_manager.get_transaction_hashes(address, start_item_index)
        actual_start_item_index = (start_item_index // config.dev.data_per_page) * config.dev.data_per_page
        transaction_hashes = transaction_hashes[start_item_index - actual_start_item_index:]
        while actual_start_item_index < end_item_index:
            actual_start_item_index += config.dev.data_per_page
            transaction_hashes.extend(self._chain_manager.get_transaction_hashes(address,
                                                                                 actual_start_item_index))
        return transaction_hashes[:item_per_page][-1::-1]

    def _load_multi_sig_spend_txn_hashes(self, address: bytes, item_per_page: int,
                                         page_number: int, mode: int) -> list:
        if OptimizedAddressState.address_is_valid(address):
            address_state = self._chain_manager.get_optimized_address_state(address)
        elif MultiSigAddressState.address_is_valid(address):
            address_state = self._chain_manager.get_multi_sig_address_state(address)
        else:
            return []

        start_item_index = max(0, address_state.multi_sig_spend_count() - item_per_page * page_number)
        end_item_index = min(address_state.multi_sig_spend_count(), start_item_index + item_per_page)

        if mode > 0:
            start_item_index = 0
            end_item_index = address_state.multi_sig_spend_count()

        transaction_hashes = self._chain_manager.get_multi_sig_spend_txn_hashes(address, start_item_index)
        actual_start_item_index = (start_item_index // config.dev.data_per_page) * config.dev.data_per_page
        multi_sig_spend_txn_hashes = transaction_hashes[start_item_index - actual_start_item_index:]
        while actual_start_item_index < end_item_index and len(multi_sig_spend_txn_hashes) < item_per_page:
            actual_start_item_index += config.dev.data_per_page
            multi_sig_spend_txn_hashes.extend(
                self._chain_manager.get_multi_sig_spend_txn_hashes(address, actual_start_item_index))
        return multi_sig_spend_txn_hashes[:item_per_page][-1::-1]

    def _load_token_transaction_hashes(self, address: bytes, item_per_page: int, page_number: int) -> list:
        address_state = self._chain_manager.get_optimized_address_state(address)
        start_item_index = max(0, address_state.tokens_count() - item_per_page * page_number)
        end_item_index = min(address_state.tokens_count(), start_item_index + item_per_page)

        transaction_hashes = self._chain_manager.get_token_transaction_hashes(address, start_item_index)
        actual_start_item_index = (start_item_index // config.dev.data_per_page) * config.dev.data_per_page
        token_transaction_hashes = transaction_hashes[start_item_index - actual_start_item_index:]
        while actual_start_item_index < end_item_index:
            actual_start_item_index += config.dev.data_per_page
            token_transaction_hashes.extend(
                self._chain_manager.get_token_transaction_hashes(address, actual_start_item_index))
        return token_transaction_hashes[:item_per_page][-1::-1]

    def _load_slave_transaction_hashes(self, address: bytes, item_per_page: int, page_number: int) -> list:
        address_state = self._chain_manager.get_optimized_address_state(address)
        start_item_index = max(0, address_state.slaves_count() - item_per_page * page_number)
        end_item_index = min(address_state.slaves_count(), start_item_index + item_per_page)

        if start_item_index < 0:
            return []

        transaction_hashes = self._chain_manager.get_slave_transaction_hashes(address, start_item_index)
        actual_start_item_index = (start_item_index // config.dev.data_per_page) * config.dev.data_per_page
        token_transaction_hashes = transaction_hashes[start_item_index - actual_start_item_index:]
        while actual_start_item_index < end_item_index:
            actual_start_item_index += config.dev.data_per_page
            token_transaction_hashes.extend(
                self._chain_manager.get_slave_transaction_hashes(address, actual_start_item_index))
        return token_transaction_hashes[:item_per_page][-1::-1]

    def _load_lattice_pks_transaction_hashes(self, address: bytes, item_per_page: int,
                                             page_number: int) -> list:
        address_state = self._chain_manager.get_optimized_address_state(address)
        start_item_index = max(0, address_state.lattice_pk_count() - item_per_page * page_number)
        end_item_index = min(address_state.lattice_pk_count(), start_item_index + item_per_page)

        transaction_hashes = self._chain_manager.get_lattice_pks_transaction_hashes(address, start_item_index)
        actual_start_item_index = (start_item_index // config.dev.data_per_page) * config.dev.data_per_page
        lattice_pks_transaction_hashes = transaction_hashes[start_item_index - actual_start_item_index:]
        while actual_start_item_index < end_item_index:
            actual_start_item_index += config.dev.data_per_page
            lattice_pks_transaction_hashes.extend(
                self._chain_manager.get_lattice_pks_transaction_hashes(address, actual_start_item_index))
        return lattice_pks_transaction_hashes[:item_per_page][-1::-1]

    def _load_multi_sig_addresses(self, address: bytes, item_per_page: int, page_number: int) -> list:
        address_state = self._chain_manager.get_optimized_address_state(address)
        start_item_index = max(0, address_state.multi_sig_address_count() - item_per_page * page_number)
        end_item_index = min(address_state.multi_sig_address_count(), start_item_index + item_per_page)

        multi_sig_addresses = self._chain_manager.get_multi_sig_addresses(address, start_item_index)
        actual_start_item_index = (start_item_index // config.dev.data_per_page) * config.dev.data_per_page
        multi_sig_addresses = multi_sig_addresses[start_item_index - actual_start_item_index:]
        while actual_start_item_index < end_item_index:
            actual_start_item_index += config.dev.data_per_page
            multi_sig_addresses.extend(
                self._chain_manager.get_multi_sig_addresses(address, actual_start_item_index))
        return multi_sig_addresses[:item_per_page][-1::-1]

    def _load_inbox_message_transaction_hashes(self, address: bytes, item_per_page: int,
                                               page_number: int) -> list:
        address_state = self._chain_manager.get_optimized_address_state(address)
        start_item_index = max(0, address_state.inbox_message_count() - item_per_page * page_number)
        end_item_index = min(address_state.inbox_message_count(), start_item_index + item_per_page)

        transaction_hashes = self._chain_manager.get_inbox_message_transaction_hashes(address, start_item_index)
        actual_start_item_index = (start_item_index // config.dev.data_per_page) * config.dev.data_per_page
        inbox_message_transaction_hashes = transaction_hashes[start_item_index - actual_start_item_index:]
        while actual_start_item_index < end_item_index:
            actual_start_item_index += config.dev.data_per_page
            inbox_message_transaction_hashes.extend(
                self._chain_manager.get_inbox_message_transaction_hashes(address, actual_start_item_index))
        return inbox_message_transaction_hashes[:item_per_page][-1::-1]

    def get_mini_transactions_by_address(self, address: bytes, item_per_page: int, page_number: int):
        if item_per_page == 0:
            return None
        mini_transactions = []
        transaction_hashes = self._load_transaction_hashes(address, item_per_page, page_number)
        response = qrl_pb2.GetMiniTransactionsByAddressResp()
        for tx_hash in transaction_hashes:
            mini_transaction = qrl_pb2.MiniTransaction()
            mini_transaction.transaction_hash = bin2hstr(tx_hash)
            tx, _ = self._chain_manager.get_tx_metadata(tx_hash)
            amount = 0
            if tx.addr_from == address:
                amount -= tx.fee
            if isinstance(tx, TransferTransaction):
                if tx.addr_from == address:
                    amount -= tx.total_amount
                try:
                    for i in range(len(tx.addrs_to)):
                        if tx.addrs_to[i] == address:
                            amount += tx.amounts[i]
                except ValueError:
                    pass
            elif isinstance(tx, CoinBase):
                if tx.addr_to == address:
                    amount += tx.amount
            elif isinstance(tx, MultiSigSpend):
                try:
                    for i in range(len(tx.addrs_to)):
                        if tx.addrs_to[i] == address:
                            amount += tx.amounts[i]
                except ValueError:
                    pass
            if amount < 0:
                mini_transaction.out = True
            mini_transaction.amount = abs(amount)
            mini_transactions.append(mini_transaction)

        response.mini_transactions.extend(mini_transactions)
        response.balance = self._chain_manager.get_address_balance(address)
        return response

    def get_transactions_by_address(self, address: bytes, item_per_page: int, page_number: int):
        if item_per_page == 0:
            return None
        transaction_hashes = self._load_transaction_hashes(address, item_per_page, page_number)

        response = qrl_pb2.GetTransactionsByAddressResp()
        for tx_hash in transaction_hashes:
            tx, block_number = self._chain_manager.get_tx_metadata(tx_hash)
            b = self.get_block_from_index(block_number)
            transaction_detail = qrl_pb2.GetTransactionResp(tx=tx.pbdata,
                                                            confirmations=self.block_height - block_number + 1,
                                                            block_number=block_number,
                                                            block_header_hash=b.headerhash,
                                                            timestamp=b.timestamp,
                                                            addr_from=tx.addr_from)
            response.transactions_detail.extend([transaction_detail])
        return response

    def get_multi_sig_spend_txs_by_address(self, address: bytes, item_per_page: int,
                                           page_number: int, filter_type: int):
        # filter_type = 0 | No Filter (default)
        # filter_type = 1 | Executed Only (All executed are considered to be expired)
        # filter_type = 2 | Non Executed
        # filter_type = 3 | Expired
        # filter_type = 4 | Non Expired
        # filter_type = 5 | Non Executed & Expired
        # filter_type = 6 | Non Executed & Non Expired

        if item_per_page == 0:
            return None
        transaction_hashes = self._load_multi_sig_spend_txn_hashes(address, item_per_page,
                                                                   page_number, filter_type)

        response = qrl_pb2.GetMultiSigSpendTxsByAddressResp()
        for tx_hash in transaction_hashes:
            if filter_type in (1, 2, 5, 6):
                vote_stats = self._chain_manager.get_vote_stats(tx_hash)
                if filter_type == 1 and not vote_stats.executed:
                    continue
                if filter_type in (2, 5, 6) and vote_stats.executed:
                    continue

            tx, block_number = self._chain_manager.get_tx_metadata(tx_hash)
            current_block_number = self._chain_manager.height
            is_expired = tx.expiry_block_number <= current_block_number
            if filter_type in (4, 6):
                if is_expired:
                    continue
            if filter_type in (3, 5):
                if not is_expired:
                    continue

            b = self.get_block_from_index(block_number)
            transaction_detail = qrl_pb2.GetTransactionResp(tx=tx.pbdata,
                                                            confirmations=self.block_height - block_number + 1,
                                                            block_number=block_number,
                                                            block_header_hash=b.headerhash,
                                                            timestamp=b.timestamp,
                                                            addr_from=tx.addr_from)
            response.transactions_detail.extend([transaction_detail])
        return response

    def get_vote_stats(self, multi_sig_spend_tx_hash: bytes):
        vote_stats = self._chain_manager.get_vote_stats(multi_sig_spend_tx_hash)
        return qrl_pb2.GetVoteStatsResp(vote_stats=vote_stats.pbdata)

    def get_inbox_messages_by_address(self, address: bytes, item_per_page: int, page_number: int):
        if item_per_page == 0:
            return None
        transaction_hashes = self._load_inbox_message_transaction_hashes(address, item_per_page, page_number)

        response = qrl_pb2.GetTransactionsByAddressResp()
        for tx_hash in transaction_hashes:
            tx, block_number = self._chain_manager.get_tx_metadata(tx_hash)
            b = self.get_block_from_index(block_number)
            transaction_detail = qrl_pb2.GetTransactionResp(tx=tx.pbdata,
                                                            confirmations=self.block_height - block_number + 1,
                                                            block_number=block_number,
                                                            block_header_hash=b.headerhash,
                                                            timestamp=b.timestamp,
                                                            addr_from=tx.addr_from)
            response.transactions_detail.extend([transaction_detail])
        return response

    def get_tokens_by_address(self, address: bytes, item_per_page: int, page_number: int):
        if item_per_page == 0:
            return None
        token_hashes = self._load_token_transaction_hashes(address, item_per_page, page_number)

        response = qrl_pb2.GetTokensByAddressResp()
        for tx_hash in token_hashes:
            tx, _ = self._chain_manager.get_tx_metadata(tx_hash)
            token_balance = self._chain_manager.get_token(address, tx.txhash)
            transaction_detail = qrl_pb2.TokenDetail(token_txhash=tx.txhash, name=tx.name,
                                                     symbol=tx.symbol, balance=token_balance.balance)
            response.tokens_detail.extend([transaction_detail])
        return response

    def get_slaves_by_address(self, address: bytes, item_per_page: int, page_number: int):
        if item_per_page > config.dev.data_per_page or item_per_page == 0:
            return None
        slave_hashes = self._load_slave_transaction_hashes(address, item_per_page, page_number)

        response = qrl_pb2.GetSlavesByAddressResp()
        for tx_hash in slave_hashes:
            tx, _ = self._chain_manager.get_tx_metadata(tx_hash)
            for index in range(0, len(tx.slave_pks)):
                transaction_detail = qrl_pb2.SlaveDetail(
                    slave_address=bytes(QRLHelper.getAddress(tx.slave_pks[index])),
                    access_type=tx.access_types[index])
                response.slaves_detail.extend([transaction_detail])
        return response

    def get_lattice_pks_by_address(self, address: bytes, item_per_page: int, page_number: int):
        if item_per_page > config.dev.data_per_page or item_per_page == 0:
            return None
        lattice_pk_hashes = self._load_lattice_pks_transaction_hashes(address, item_per_page, page_number)

        response = qrl_pb2.GetLatticePKsByAddressResp()
        for tx_hash in lattice_pk_hashes:
            tx, _ = self._chain_manager.get_tx_metadata(tx_hash)
            transaction_detail = qrl_pb2.LatticePKsDetail(pk1=tx.pk1, pk2=tx.pk2, pk3=tx.pk3,
                                                          tx_hash=tx_hash)
            response.lattice_pks_detail.extend([transaction_detail])
        return response

    def get_multi_sig_addresses_by_address(self, address: bytes, item_per_page: int, page_number: int):
        if item_per_page > config.dev.data_per_page or item_per_page == 0:
            return None
        multi_sig_addresses = self._load_multi_sig_addresses(address, item_per_page, page_number)

        response = qrl_pb2.GetMultiSigAddressesByAddressResp()
        for multi_sig_address in multi_sig_addresses:
            multi_sig_detail = qrl_pb2.MultiSigDetail(
                address=multi_sig_address,
                balance=self._chain_manager.get_multi_sig_address_state(multi_sig_address).balance,
            )
            response.multi_sig_detail.extend([multi_sig_detail])
        return response

    def get_transaction(self, query_hash: bytes):
        """
        This method returns an object that matches the query hash
        """
        # FIXME: At some point, all objects in DB will be indexed by a hash
        # TODO: Search tx hash
        # FIXME: We don't need searches, etc.. getting a protobuf indexed by hash from DB should be enough
        # FIXME: This is just a workaround to provide functionality
        result = self._chain_manager.get_tx_metadata(query_hash)
        return result

    def get_block_header_hash_by_number(self, query_block_number: int):
        return self._chain_manager.get_block_header_hash_by_number(query_block_number)

    def get_unconfirmed_transaction(self, query_hash: bytes):
        result = self._chain_manager.get_unconfirmed_transaction(query_hash)
        return result

    def get_block_last(self) -> Optional[Block]:
        """
        This method returns an object that matches the query hash
        """
        return self._chain_manager.last_block

    def get_block_from_hash(self, query_hash: bytes) -> Optional[Block]:
        """
        This method returns an object that matches the query hash
        """
        return self._chain_manager.get_block(query_hash)

    def get_block_from_index(self, index: int) -> Block:
        """
        This method returns an object that matches the query hash
        """
        return self._chain_manager.get_block_by_number(index)

    def get_blockidx_from_txhash(self, transaction_hash):
        result = self._chain_manager.get_tx_metadata(transaction_hash)
        if result:
            return result[1]
        return None

    def get_latest_blocks(self, offset, count) -> List[Block]:
        answer = []
        end = self.block_height - offset
        start = max(0, end - count + 1)
        for blk_idx in range(start, end + 1):
            answer.append(self._chain_manager.get_block_by_number(blk_idx))
        return answer

    def get_latest_transactions(self, offset, count):
        answer = []
        skipped = 0
        for tx in self._chain_manager.get_last_transactions():
            if skipped >= offset:
                answer.append(tx)
                if len(answer) >= count:
                    break
            else:
                skipped += 1
        return answer

    def get_latest_transactions_unconfirmed(self, offset, count):
        answer = []
        skipped = 0
        for tx_set in self._chain_manager.tx_pool.transactions:
            if skipped >= offset:
                answer.append(tx_set[1])
                if len(answer) >= count:
                    break
            else:
                skipped += 1
        return answer

    def get_node_info(self) -> qrl_pb2.NodeInfo:
        info = qrl_pb2.NodeInfo()
        info.version = self.version
        info.state = self.state
        info.num_connections = self.num_connections
        info.num_known_peers = self.num_known_peers
        info.uptime = self.uptime
        info.block_height = self.block_height
        info.block_last_hash = self._chain_manager.last_block.headerhash
        info.network_id = config.user.genesis_prev_headerhash
        return info

    def get_block_timeseries(self, block_count) -> Iterator[qrl_pb2.BlockDataPoint]:
        result = []

        if self.block_height <= 0:
            return result

        block = self._chain_manager.last_block
        if block is None:
            return result

        headerhash_current = block.headerhash
        while len(result) < block_count:
            data_point = self._chain_manager.get_block_datapoint(headerhash_current)
            if data_point is None:
                break
            result.append(data_point)
            headerhash_current = data_point.header_hash_prev

        return reversed(result)

    def get_blockheader_and_metadata(self, block_number=0) -> Tuple:
        return self._chain_manager.get_blockheader_and_metadata(block_number)

    def get_block_to_mine(self, wallet_address) -> list:
        return self._chain_manager.get_block_to_mine(self._pow.miner, wallet_address)

    def submit_mined_block(self, blob) -> bool:
        return self._pow.miner.submit_mined_block(blob)
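The _load_*_hashes helpers above all page backwards through a per-address list stored in fixed-size chunks. A self-contained sketch of the same index arithmetic, with a plain Python list standing in for the chunked storage and DATA_PER_PAGE as a local constant (the real value lives in config.dev.data_per_page):

DATA_PER_PAGE = 4  # stand-in for config.dev.data_per_page

def load_page(all_hashes, item_per_page, page_number):
    # newest items live at the end of the list, so page 1 is the tail
    total = len(all_hashes)
    start_item_index = max(0, total - item_per_page * page_number)
    end_item_index = min(total, start_item_index + item_per_page)
    # read position aligned to the chunk boundary, as in _load_transaction_hashes
    actual_start = (start_item_index // DATA_PER_PAGE) * DATA_PER_PAGE
    window = list(all_hashes[actual_start:actual_start + DATA_PER_PAGE])
    window = window[start_item_index - actual_start:]
    while actual_start < end_item_index:
        actual_start += DATA_PER_PAGE
        window.extend(all_hashes[actual_start:actual_start + DATA_PER_PAGE])
    return window[:item_per_page][::-1]  # newest first

hashes = list(range(10))
assert load_page(hashes, item_per_page=3, page_number=1) == [9, 8, 7]
assert load_page(hashes, item_per_page=3, page_number=2) == [6, 5, 4]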
class QRLNode:
    def __init__(self, db_state: State, slaves: list):
        self.start_time = time.time()
        self.db_state = db_state
        self._sync_state = SyncState()

        self.peer_manager = P2PPeerManager()
        self.peer_manager.load_peer_addresses()
        self.peer_manager.register(P2PPeerManager.EventType.NO_PEERS, self.connect_peers)

        self.p2pchain_manager = P2PChainManager()
        self.tx_manager = P2PTxManagement()

        self._chain_manager = None  # FIXME: REMOVE. This is temporary
        self._p2pfactory = None  # FIXME: REMOVE. This is temporary
        self._pow = None

        self.slaves = slaves

        self.banned_peers_filename = os.path.join(config.user.wallet_dir, config.dev.banned_peers_filename)

        reactor.callLater(10, self.monitor_chain_state)

    ####################################################
    ####################################################
    ####################################################
    ####################################################

    @property
    def version(self):
        # FIXME: Move to __version__ coming from pip
        return config.dev.version

    @property
    def sync_state(self) -> SyncState:
        return self._sync_state

    @property
    def state(self):
        if self._p2pfactory is None:
            return ESyncState.unknown.value
        # FIXME
        return self._p2pfactory.sync_state.state.value

    @property
    def num_connections(self):
        if self._p2pfactory is None:
            return 0
        # FIXME
        return self._p2pfactory.connections

    @property
    def num_known_peers(self):
        # FIXME
        return len(self.peer_addresses)

    @property
    def uptime(self):
        return int(time.time() - self.start_time)

    @property
    def block_height(self):
        return self._chain_manager.height

    @property
    def epoch(self):
        if not self._chain_manager.get_last_block():
            return 0
        return self._chain_manager.get_last_block().block_number // config.dev.blocks_per_epoch

    @property
    def uptime_network(self):
        block_one = self._chain_manager.get_block_by_number(1)
        network_uptime = 0
        if block_one:
            network_uptime = int(time.time() - block_one.timestamp)
        return network_uptime

    @property
    def block_last_reward(self):
        if not self._chain_manager.get_last_block():
            return 0
        return self._chain_manager.get_last_block().block_reward

    @property
    def block_time_mean(self):
        # FIXME: Keep a moving mean
        return 0

    @property
    def block_time_sd(self):
        # FIXME: Keep a moving var
        return 0

    @property
    def coin_supply(self):
        # FIXME: Keep a moving var
        return self.db_state.total_coin_supply()

    @property
    def coin_supply_max(self):
        # FIXME: Keep a moving var
        return config.dev.max_coin_supply

    @property
    def peer_addresses(self):
        return self.peer_manager._peer_addresses

    ####################################################
    ####################################################
    ####################################################
    ####################################################

    def _update_banned_peers(self, banned_peers):
        current_time = ntp.getTime()
        ip_list = list(banned_peers.keys())
        for ip in ip_list:
            if current_time > banned_peers[ip]:
                del banned_peers[ip]
        self._put_banned_peers(banned_peers)

    def _put_banned_peers(self, banned_peers: dict):
        with open(self.banned_peers_filename, 'w') as f:
            json.dump(banned_peers, f)

    def _get_banned_peers(self) -> dict:
        try:
            with open(self.banned_peers_filename, 'r') as f:
                banned_peers = json.load(f)
        except FileNotFoundError:
            banned_peers = dict()
            self._put_banned_peers(banned_peers)
        return banned_peers

    def is_banned(self, peer_ip: str):
        banned_peers = self._get_banned_peers()
        self._update_banned_peers(banned_peers)
        if peer_ip in banned_peers:
            return True
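
    # ------------------------------------------------------------------
    # Editor's note: the ban bookkeeping above persists a flat JSON mapping
    # of peer IP -> unban timestamp (NTP seconds). A minimal standalone
    # check against such a file might look like the sketch below; the
    # method name is illustrative and not part of the original class.
    @staticmethod
    def _is_ip_banned_example(banned_peers_filename: str, peer_ip: str) -> bool:
        try:
            with open(banned_peers_filename, 'r') as f:
                banned_peers = json.load(f)
        except FileNotFoundError:
            return False
        # An entry is still active while its expiry lies in the future
        return banned_peers.get(peer_ip, 0) > ntp.getTime()
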
    def ban_peer(self, peer_obj):
        ip = peer_obj.peer_ip
        ban_time = ntp.getTime() + (config.user.ban_minutes * 60)
        banned_peers = self._get_banned_peers()
        banned_peers[ip] = ban_time
        self._update_banned_peers(banned_peers)
        logger.warning('Banned %s', peer_obj.peer_ip)
        peer_obj.loseConnection()

    def monitor_chain_state(self):
        self.peer_manager.monitor_chain_state()

        last_block = self._chain_manager.get_last_block()
        block_metadata = self.db_state.get_block_metadata(last_block.headerhash)
        node_chain_state = qrl_pb2.NodeChainState(block_number=last_block.block_number,
                                                  header_hash=last_block.headerhash,
                                                  cumulative_difficulty=bytes(block_metadata.cumulative_difficulty),
                                                  timestamp=int(time.time()))
        self.peer_manager.broadcast_chain_state(node_chain_state=node_chain_state)
        channel = self.peer_manager.get_better_difficulty(block_metadata.cumulative_difficulty)
        logger.debug('Got better difficulty %s', channel)
        if channel:
            logger.debug('Connection id >> %s', channel.connection_id)
            channel.get_headerhash_list(self._chain_manager.height)

        reactor.callLater(config.user.chain_state_broadcast_period, self.monitor_chain_state)

    # FIXME: REMOVE. This is temporary
    def set_chain(self, chain_manager: ChainManager):
        self._chain_manager = chain_manager

    ####################################################
    ####################################################
    ####################################################
    ####################################################

    def connect_peers(self):
        logger.info('<<<Reconnecting to peer list: %s', self.peer_addresses)
        for peer_address in self.peer_addresses:
            if self.is_banned(peer_address):
                continue
            self._p2pfactory.connect_peer(peer_address)

    def start_pow(self):
        # FIXME: This seems an unexpected side effect. It should be refactored
        self._pow = POW(chain_manager=self._chain_manager,
                        p2p_factory=self._p2pfactory,
                        sync_state=self._sync_state,
                        time_provider=ntp,
                        slaves=self.slaves)

        self._pow.start()

    def start_listening(self):
        self._p2pfactory = P2PFactory(chain_manager=self._chain_manager,
                                      sync_state=self.sync_state,
                                      qrl_node=self)  # FIXME: Try to avoid cycle references
        self._p2pfactory.start_listening()

    ####################################################
    ####################################################
    ####################################################
    ####################################################

    @staticmethod
    def validate_amount(amount_str: str) -> bool:
        # FIXME: Refactored code. Review Decimal usage all over the code
        Decimal(amount_str)
        return True

    ####################################################
    ####################################################
    ####################################################
    ####################################################

    def get_address_bundle(self, key_addr: bytes):
        for addr in self._chain_manager.wallet.address_bundle:
            if addr.address == key_addr:
                return addr
        return None
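
    # ------------------------------------------------------------------
    # Editor's note: transfer_coins below refuses to spend the last remaining
    # XMSS OTS signature unless the transfer sweeps the whole balance. A
    # minimal restatement of that rule as a pure function (a hypothetical
    # helper, added here only for illustration):
    @staticmethod
    def _last_signature_must_sweep_example(remaining_signatures: int, amount: int, fee: int, balance: int) -> bool:
        if remaining_signatures == 1:
            # Anything short of the full balance would strand the remainder
            # in an address that can no longer sign.
            return amount + fee >= balance
        return True
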
    # FIXME: Rename this appropriately
    def transfer_coins(self, addr_from: bytes, addr_to: bytes, amount: int, xmss_ots_index: int, fee: int = 0):
        addr_bundle = self.get_address_bundle(addr_from)
        if addr_bundle is None:
            raise LookupError("The source address does not belong to this wallet/node")

        xmss_from = addr_bundle.xmss
        if xmss_from is None:
            raise LookupError("The source address does not belong to this wallet/node")

        xmss_pk = xmss_from.pk()

        # TODO: Review this
        # Balance validation
        if xmss_from.get_remaining_signatures() == 1:
            balance = self.db_state.balance(addr_from)
            if amount + fee < balance:
                # FIXME: maybe this is too strict?
                raise RuntimeError("Last signature! You must move all the funds to another account!")

        tx = self.create_send_tx(addr_from, addr_to, amount, fee, xmss_pk, xmss_ots_index)
        tx.sign(xmss_from)
        self.submit_send_tx(tx)
        return tx

    @staticmethod
    def create_token_txn(addr_from: bytes,
                         symbol: bytes,
                         name: bytes,
                         owner: bytes,
                         decimals: int,
                         initial_balances,
                         fee: int,
                         xmss_pk: bytes,
                         xmss_ots_index: int):
        return TokenTransaction.create(addr_from,
                                       symbol,
                                       name,
                                       owner,
                                       decimals,
                                       initial_balances,
                                       fee,
                                       xmss_pk,
                                       xmss_ots_index)

    @staticmethod
    def create_transfer_token_txn(addr_from: bytes,
                                  addr_to: bytes,
                                  token_txhash: bytes,
                                  amount: int,
                                  fee: int,
                                  xmss_pk: bytes,
                                  xmss_ots_index: int):
        return TransferTokenTransaction.create(addr_from,
                                               token_txhash,
                                               addr_to,
                                               amount,
                                               fee,
                                               xmss_pk,
                                               xmss_ots_index)

    # FIXME: Rename this appropriately
    def create_send_tx(self, addr_from: bytes, addr_to: bytes, amount: int, fee: int,
                       xmss_pk: bytes, xmss_ots_index: int) -> TransferTransaction:
        balance = self.db_state.balance(addr_from)
        if amount + fee > balance:
            raise RuntimeError("Not enough funds in the source address")

        return TransferTransaction.create(addr_from=addr_from,
                                          addr_to=addr_to,
                                          amount=amount,
                                          fee=fee,
                                          xmss_pk=xmss_pk,
                                          xmss_ots_index=xmss_ots_index)

    def create_slave_tx(self, addr_from: bytes, slave_pks: list, access_types: list, fee: int,
                        xmss_pk: bytes, xmss_ots_index: int) -> SlaveTransaction:
        return SlaveTransaction.create(addr_from=addr_from,
                                       slave_pks=slave_pks,
                                       access_types=access_types,
                                       fee=fee,
                                       xmss_pk=xmss_pk,
                                       xmss_ots_index=xmss_ots_index)

    # FIXME: Rename this appropriately
    def submit_send_tx(self, tx) -> bool:
        if tx is None:
            raise ValueError("The transaction was empty")

        if self._chain_manager.tx_pool.is_full_transaction_pool():
            raise ValueError("Transaction Pool is full")

        if tx.subtype in (qrl_pb2.Transaction.TRANSFER,
                          qrl_pb2.Transaction.LATTICE,
                          qrl_pb2.Transaction.MESSAGE,
                          qrl_pb2.Transaction.TOKEN,
                          qrl_pb2.Transaction.TRANSFERTOKEN,
                          qrl_pb2.Transaction.SLAVE):
            self._p2pfactory.add_unprocessed_txn(tx, ip=None)  # TODO (cyyber): Replace None with IP made API request

        return True

    def get_address_is_used(self, address: bytes) -> bool:
        if not AddressState.address_is_valid(address):
            raise ValueError("Invalid Address")
        return self.db_state.address_used(address)

    def get_address_state(self, address: bytes) -> qrl_pb2.AddressState:
        if not AddressState.address_is_valid(address):
            raise ValueError("Invalid Address")

        address_state = self.db_state.get_address(address)
        return address_state
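
    # ------------------------------------------------------------------
    # Editor's note: a hypothetical sketch of issuing a token through the
    # create_token_txn helper above. Symbol, name, decimals and fee are
    # placeholder values; initial_balances is left empty here, whereas the
    # real API is assumed to expect protobuf address/amount entries.
    def _create_token_example(self, addr_from: bytes, owner: bytes, xmss_pk: bytes, xmss_ots_index: int):
        return self.create_token_txn(addr_from=addr_from,
                                     symbol=b'EXMPL',
                                     name=b'Example token',
                                     owner=owner,
                                     decimals=0,
                                     initial_balances=[],
                                     fee=0,
                                     xmss_pk=xmss_pk,
                                     xmss_ots_index=xmss_ots_index)
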
    def get_transaction(self, query_hash: bytes):
        """
        This method returns an object that matches the query hash
        """
        # FIXME: At some point, all objects in DB will be indexed by a hash
        # TODO: Search tx hash
        # FIXME: We don't need searches, etc.; getting a protobuf indexed by hash from DB should be enough
        # FIXME: This is just a workaround to provide functionality
        result = self._chain_manager.get_transaction(query_hash)
        if result:
            return result[0], result[1]
        return None, None

    def get_block_from_hash(self, query_hash: bytes) -> Optional[Block]:
        """
        This method returns an object that matches the query hash
        """
        return self.db_state.get_block(query_hash)

    def get_block_from_index(self, index: int) -> Block:
        """
        This method returns an object that matches the query hash
        """
        return self.db_state.get_block_by_number(index)

    def get_blockidx_from_txhash(self, transaction_hash):
        result = self.db_state.get_tx_metadata(transaction_hash)
        if result:
            return result[1]
        return None

    def get_token_detailed_list(self):
        pbdata = self.db_state.get_token_list()
        token_list = TokenList.from_json(pbdata)

        token_detailed_list = qrl_pb2.TokenDetailedList()
        for token_txhash in token_list.token_txhash:
            token_txn, _ = self.db_state.get_tx_metadata(token_txhash)
            token_detailed_list.tokens.extend([token_txn.pbdata])
        return token_detailed_list

    def get_latest_blocks(self, offset, count) -> List[Block]:
        answer = []
        end = self.block_height - offset
        start = max(0, end - count - offset)
        for blk_idx in range(start, end + 1):
            answer.append(self._chain_manager.get_block_by_number(blk_idx))
        return answer

    def get_latest_transactions(self, offset, count):
        # FIXME: This is incorrect
        # FIXME: Moved code. Breaking encapsulation. Refactor
        answer = []
        skipped = 0
        for tx in self.db_state.get_last_txs():
            if isinstance(tx, TransferTransaction):
                if skipped >= offset:
                    answer.append(tx)
                    if len(answer) >= count:
                        break
                else:
                    skipped += 1
        return answer

    def get_latest_transactions_unconfirmed(self, offset, count):
        answer = []
        skipped = 0
        for tx in self._chain_manager.tx_pool.transaction_pool:
            if isinstance(tx, TransferTransaction):
                if skipped >= offset:
                    answer.append(tx)
                    if len(answer) >= count:
                        break
                else:
                    skipped += 1
        return answer

    def getNodeInfo(self) -> qrl_pb2.NodeInfo:
        info = qrl_pb2.NodeInfo()
        info.version = self.version
        info.state = self.state
        info.num_connections = self.num_connections
        info.num_known_peers = self.num_known_peers
        info.uptime = self.uptime
        info.block_height = self.block_height
        info.block_last_hash = b''  # FIXME
        info.network_id = config.dev.genesis_prev_headerhash  # FIXME
        return info

    ####################################################
    ####################################################
    ####################################################
    ####################################################

    def broadcast_ephemeral_message(self, encrypted_ephemeral: EncryptedEphemeralMessage) -> bool:
        if not encrypted_ephemeral.validate():
            return False

        self._p2pfactory.broadcast_ephemeral_message(encrypted_ephemeral)
        return True

    def collect_ephemeral_message(self, msg_id):
        self._chain_manager.collect_ephemeral_message(msg_id)
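
# ----------------------------------------------------------------------
# Editor's note: get_latest_transactions and get_latest_transactions_unconfirmed
# above both implement the same skip/take walk over an iterable (with an extra
# TransferTransaction filter in this class). A minimal standalone restatement
# of that pattern (hypothetical module-level helper, not part of the original
# file):
def _skip_take_example(items, offset: int, count: int) -> list:
    answer = []
    skipped = 0
    for item in items:
        if skipped >= offset:
            answer.append(item)
            if len(answer) >= count:
                break
        else:
            skipped += 1
    return answer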