def _read_wallet(self):
    """Load wallet addresses from the protobuf wallet file into self.address_bundle.

    A missing file is treated as an empty wallet. Any read/parse failure is
    logged and leaves address_bundle empty. If a stored mnemonic does not
    reproduce its recorded address (corruption/tampering), the process exits.
    """
    self.address_bundle = []
    if not os.path.isfile(self.wallet_dat_filename):
        return
    try:
        with open(self.wallet_dat_filename, "rb") as infile:
            wallet_store = qrl_pb2.WalletStore()
            wallet_store.ParseFromString(bytes(infile.read()))
            self.address_bundle = []
            for a in wallet_store.wallets:
                # Rebuild the XMSS tree from the mnemonic and restore its OTS index.
                tmpxmss = XMSS(config.dev.xmss_tree_height, mnemonic2bin(a.mnemonic.strip()))
                tmpxmss.set_index(a.xmss_index)
                if a.address.encode() != tmpxmss.get_address():
                    logger.fatal("Mnemonic and address do not match.")
                    exit(1)
                self.address_bundle.append(AddressBundle(tmpxmss.get_address(), tmpxmss))
    except Exception as e:
        logger.warning("It was not possible to open the wallet: %s", e)
def rollback(self, block):
    """Roll the mainchain back to the fork point of `block` and replay forward.

    Walks prev_headerhash links from `block` until a header already present in
    state_objects (the fork point) is found, collecting hashes to replay.
    Then destroys the current chain state past that point and re-applies each
    collected block's state oldest-first, updating mainchain metadata.
    """
    hash_path = []
    while True:
        if self.state.state_objects.contains(block.headerhash):
            break
        hash_path.append(block.headerhash)
        new_block = self.state.get_block(block.prev_headerhash)
        if not new_block:
            logger.warning('No block found %s', block.prev_headerhash)
            break
        block = new_block
        if block.block_number == 0:
            del hash_path[-1]  # Skip replaying Genesis Block
            break
    self.state.state_objects.destroy_current_state(None)
    # The last collected hash is the oldest block past the fork point.
    block = self.state.get_block(hash_path[-1])
    self.state.state_objects.destroy_fork_states(block.block_number, block.headerhash)
    # Replay collected blocks from oldest to newest.
    for header_hash in hash_path[-1::-1]:
        block = self.state.get_block(header_hash)
        address_set = self.state.prepare_address_list(block)  # Prepare list for current block
        address_txn = self.state.get_state_mainchain(address_set)
        self.state.update_mainchain_state(address_txn, block.block_number, block.headerhash)
        self.last_block = block
        self._update_mainchain(block, None)
        self.tx_pool.remove_tx_in_block_from_pool(block)
        self.state.update_mainchain_height(block.block_number, None)
        self.state.update_tx_metadata(block, None)
    self.trigger_miner = True
def validate_mining_nonce(self, block, enable_logging=False):
    """Check a block's proof-of-work against the difficulty derived from its parent.

    Recomputes (diff, target) from the parent's recorded difficulty plus the
    timestamp measurement, builds the PoW input from the nonce and the block's
    mining hash, and verifies it with PoWHelper.

    :param block: block whose mining nonce is being validated
    :param enable_logging: emit detailed debug/warning output when True
    :return: True when the PoW input satisfies the target
    """
    parent_metadata = self.state.get_block_metadata(block.prev_headerhash)
    parent_block = self.state.get_block(block.prev_headerhash)
    # PoW input: last 4 entries of the nonce-as-UInt256 tuple + mining hash bytes.
    input_bytes = StringToUInt256(str(block.mining_nonce))[-4:] + tuple(block.mining_hash)
    measurement = self.state.get_measurement(block.timestamp, block.prev_headerhash)
    diff, target = self._difficulty_tracker.get(
        measurement=measurement,
        parent_difficulty=parent_metadata.block_difficulty)
    if enable_logging:
        logger.debug('-----------------START--------------------')
        logger.debug('Validate #%s', block.block_number)
        logger.debug('block.timestamp %s', block.timestamp)
        logger.debug('parent_block.timestamp %s', parent_block.timestamp)
        logger.debug('parent_block.difficulty %s', UInt256ToString(parent_metadata.block_difficulty))
        logger.debug('input_bytes %s', UInt256ToString(input_bytes))
        logger.debug('diff : %s | target : %s', UInt256ToString(diff), target)
        logger.debug('-------------------END--------------------')
    if not PoWHelper.verifyInput(input_bytes, target):
        if enable_logging:
            logger.warning("PoW verification failed")
            qn = Qryptonight()
            tmp_hash = qn.hash(input_bytes)
            logger.warning("{}".format(tmp_hash))
            logger.debug('%s', block.to_json())
        return False
    return True
def set_addresses_state(self, addresses_state: dict, state_code: bytes):
    """
    Sets the addresses_state from the latest state objects cache from or after state_code.

    :param addresses_state: dict address -> state, filled in place
    :param state_code: state code identifying the starting state loader
    :raises Exception: when state_code is not found among the state loaders
    """
    str_state_code = bin2hstr(state_code).encode()
    # Locate the loader matching state_code (enumerate replaces the original
    # hand-maintained counter + found flag).
    index = None
    for i, state_object in enumerate(self.state_objects.state_loaders):
        if state_object.state_code == str_state_code:
            index = i
            break
    if index is None:
        logger.warning('Not Possible: State Code not found %s', str_state_code)
        raise Exception
    for address in addresses_state:
        # Walk loaders from the matched one back to the oldest; first hit wins.
        for state_obj_index in range(index, -1, -1):
            state_object = self.state_objects.get_state_loader_by_index(state_obj_index)
            addresses_state[address] = state_object.get_address(address)
            if addresses_state[address]:
                break
        if not addresses_state[address]:
            # Nothing cached for this address: fall back to the persisted state.
            addresses_state[address] = self._get_address_state(address)
def get_mining_xmss(self):
    """Return an XMSS tree usable for signing mined blocks, or None.

    Fast path: reuse the cached tree when it still has an unused OTS key; the
    cache is dropped when no unused key remains. Slow path: scan the slave
    seeds for a tree with an unused OTS key. In both cases, a non-master tree
    additionally requires mining permission.
    """
    if self._mining_xmss:
        addr_state = self.state.get_address(self._mining_xmss.get_address())
        if self.set_unused_ots_key(self._mining_xmss, addr_state, self._mining_xmss.get_index()):
            if self.valid_mining_permission():
                return self._mining_xmss
        else:
            # Cached tree exhausted its OTS keys: invalidate the cache.
            self._mining_xmss = None
            return None
    if not self._mining_xmss:
        # _slaves layout: [0] master address, [1] list of slave seeds.
        self._master_address = self._slaves[0].encode()
        unused_ots_found = False
        for slave_seed in self._slaves[1]:
            xmss = Wallet.get_new_address(seed=slave_seed).xmss
            addr_state = self.state.get_address(xmss.get_address())
            if self.set_unused_ots_key(xmss, addr_state):  # Unused ots_key_found
                self._mining_xmss = xmss
                unused_ots_found = True
                break
        if not unused_ots_found:  # Unused ots_key_found
            logger.warning('No OTS-KEY left for mining')
            return None
    if self._master_address == self._mining_xmss.get_address():
        return self._mining_xmss
    if not self.valid_mining_permission():
        return None
    return self._mining_xmss
def get_state_loader_by_index(self, index) -> StateLoader:
    """Return the state loader at `index`, raising for out-of-range indices.

    Also rejects negative indices: Python's negative indexing would otherwise
    silently return a loader from the wrong end of the list instead of
    signalling the caller's error.

    :raises Exception: when index is outside [0, len(self._state_loaders))
    """
    if not 0 <= index < len(self._state_loaders):
        logger.warning('Index is not in range')
        logger.warning('Index: %s, Len of State_loaders: %s', index, len(self._state_loaders))
        raise Exception
    return self._state_loaders[index]
def _parse_buffer(self, total_read):
    # FIXME: This parsing/wire protocol needs to be replaced
    """Generator: yield complete LegacyMessages parsed from the receive buffer.

    Wire format: a 4-byte big-endian length prefix followed by a protobuf
    payload. Incomplete frames leave the buffer untouched until more data
    arrives; consumed byte counts accumulate into total_read[0].

    NOTE(review): the completeness check below ignores the 4-byte header
    (compare L21's `len(self._buffer) - 4 < chunk_size`), so a frame whose
    payload is only partially buffered can fall into the except path instead
    of waiting — confirm whether that is intended here.

    >>> from pyqrllib.pyqrllib import hstr2bin
    >>> p=P2PProtocol()
    >>> p._buffer = bytes(hstr2bin('000000191a170a0776657273696f6e120c67656e657369735f68617368'+ \
                                   '000000191a170a0776657273696f6e120c67656e657369735f68617368'))
    >>> messages = p._parse_buffer([0])
    >>> len(list(messages))
    2
    """
    while self._buffer:
        # FIXME: This is not the final implementation, it is just a minimal implementation for refactoring
        if len(self._buffer) < 4:
            # Buffer is still incomplete as it doesn't have message size
            return
        chunk_size_raw = self._buffer[:4]
        chunk_size = struct.unpack('>L', chunk_size_raw)[0]  # is m length encoded correctly?
        # FIXME: There is no limitation on the buffer size or timeout
        if len(self._buffer) < chunk_size:
            # Buffer is still incomplete as it doesn't have message
            return
        try:
            message_raw = self._buffer[4:4 + chunk_size]
            message = qrllegacy_pb2.LegacyMessage()
            message.ParseFromString(message_raw)
            yield message
        except Exception as e:
            # Parse failure: drop this frame but keep the stream alive.
            logger.warning("Problem parsing message. Skipping")
        finally:
            # Always advance past the frame (header + payload).
            self._buffer = self._buffer[4 + chunk_size:]
            total_read[0] += 4 + chunk_size
def _validate_custom(self):
    """Stateless field validation for a TransferTokenTransaction.

    :raises ValueError: for a non-positive fee
    :return: False when either address is invalid, True otherwise
    """
    if self.fee <= 0:
        # Bug fix: format the message eagerly — ValueError does not
        # interpolate printf-style arguments the way logger calls do, so the
        # original raise carried an unformatted template plus stray args.
        raise ValueError('TransferTokenTransaction [%s] Invalid Fee = %d' % (bin2hstr(self.txhash), self.fee))
    if not (AddressState.address_is_valid(self.addr_from) and AddressState.address_is_valid(self.txto)):
        logger.warning('Invalid address addr_from: %s addr_to: %s', self.addr_from, self.txto)
        return False
    return True
def _validate_custom(self):
    """Stateless field validation: positive amount and valid addresses.

    :raises ValueError: for a non-positive amount
    :return: False when either address is invalid, True otherwise
    """
    if self.amount <= 0:
        # Bug fix: format the message eagerly — ValueError does not
        # interpolate printf-style arguments, so the original raise carried an
        # unformatted template plus stray args instead of a readable message.
        raise ValueError('[%s] Invalid amount = %d' % (bin2hstr(self.txhash), self.amount))
    if not (AddressState.address_is_valid(self.addr_from) and AddressState.address_is_valid(self.txto)):
        logger.warning('Invalid address addr_from: %s addr_to: %s', self.addr_from, self.txto)
        return False
    return True
def solutionEvent(self, nonce):
    # NOTE: This function usually runs in the context of a C++ thread
    """Handle a PoW solution reported by the miner: stamp the nonce and submit the block."""
    try:
        logger.debug('Solution Found %s', nonce)
        block = self._mining_block
        block.set_mining_nonce(nonce)
        logger.info('Block #%s nonce: %s', block.block_number, StringToUInt256(str(nonce))[-4:])
        logger.info('Hash Rate: %s H/s', self.hashRate())
        # Submit a deep copy so the miner's working block stays untouched.
        self.pre_block_logic(copy.deepcopy(block))
    except Exception as e:
        logger.warning("Exception in solutionEvent")
        logger.exception(e)
def validate_extended(self, addr_from_state, addr_from_pk_state, transaction_pool):
    """Extended validation: slave authorization, coinbase source address, valid addresses, then base validation."""
    if not self.validate_slave(addr_from_state, addr_from_pk_state):
        return False
    if self.addr_from != config.dev.coinbase_address:
        return False
    addresses_ok = AddressState.address_is_valid(self.addr_from)
    addresses_ok = addresses_ok and AddressState.address_is_valid(self.txto)
    if not addresses_ok:
        logger.warning('Invalid address addr_from: %s addr_to: %s', self.addr_from, self.txto)
        return False
    return self.validate()
def valid_mining_permission(self):
    """Return whether the current mining XMSS may mine for the master address.

    The master's own key is always allowed. For a slave key, the master
    address state must grant it a permission; when it is not yet registered,
    the stored slave transaction is re-submitted to the unprocessed pool and
    permission is denied.
    """
    if self._master_address == self._mining_xmss.get_address():
        return True
    addr_state = self.state.get_address(self._master_address)
    access_type = addr_state.get_slave_permission(self._mining_xmss.pk())
    if access_type == -1:
        logger.warning('Slave is not authorized yet for mining')
        logger.warning('Added Slave Txn')
        slave_tx = Transaction.from_json(self._slaves[2])
        self._add_unprocessed_txn_fn(slave_tx, None)
        # Bug fix: return False (not None) so the method consistently returns
        # a bool; callers only test truthiness, so this is compatible.
        return False
    return True
def get_state(self, header_hash, addresses_set):
    """Compute address states as of block `header_hash` on a (possibly fork) chain.

    Phase 1: walk back from header_hash to the nearest ancestor cached in
    state_objects and seed addresses_state from that cache.
    Phase 2: fill remaining entries from Genesis balances / defaults.
    Phase 3: replay every block between that ancestor and header_hash,
    applying each transaction to addresses_state.

    :param header_hash: tip of the chain to evaluate
    :param addresses_set: addresses whose state is requested
    :return: dict address -> address state
    """
    tmp_header_hash = header_hash
    parent_headerhash = None
    addresses_state = dict()
    for address in addresses_set:
        addresses_state[address] = None
    # Phase 1: locate the closest cached ancestor state.
    while True:
        if self.state_objects.contains(header_hash):
            parent_headerhash = header_hash
            self.set_addresses_state(addresses_state, header_hash)
            break
        block = self.get_block(header_hash)
        if not block:
            logger.warning('[get_state] No Block Found %s', header_hash)
            break
        if block.block_number == 0:
            break
        header_hash = block.prev_headerhash
    # Phase 2: defaults for anything the cache didn't provide.
    # NOTE(review): this indexes addresses_state by every genesis address —
    # it assumes all genesis addresses are in addresses_set (otherwise
    # KeyError); confirm callers guarantee that.
    for genesis_balance in GenesisBlock().genesis_balance:
        bytes_addr = genesis_balance.address.encode()
        if not addresses_state[bytes_addr]:
            addresses_state[bytes_addr] = AddressState.get_default(bytes_addr)
            addresses_state[bytes_addr]._data.balance = genesis_balance.balance
    for address in addresses_state:
        if not addresses_state[address]:
            addresses_state[address] = AddressState.get_default(address)
    # Phase 3: collect the hashes between the cached ancestor and the tip...
    header_hash = tmp_header_hash
    hash_path = []
    while True:
        if parent_headerhash == header_hash:
            break
        block = self.get_block(header_hash)
        if not block:
            break
        hash_path.append(header_hash)
        header_hash = block.prev_headerhash
        if block.block_number == 0:
            break
    # ...and replay them oldest-first.
    for header_hash in hash_path[-1::-1]:
        block = self.get_block(header_hash)
        for tx_pbdata in block.transactions:
            tx = Transaction.from_pbdata(tx_pbdata)
            tx.apply_on_state(addresses_state)
    return addresses_state
def solutionEvent(self, nonce):
    # NOTE: This function usually runs in the context of a C++ thread
    """Miner solution callback: record the winning nonce and hand off a copy of the block."""
    try:
        logger.debug('Solution Found %s', nonce)
        candidate = self._mining_block
        candidate.set_nonces(nonce, 0)
        logger.info('Block #%s nonce: %s', candidate.block_number, StringToUInt256(str(nonce))[-4:])
        logger.info('Hash Rate: %s H/s', self.hashRate())
        # Work on a deep copy so the miner's block is not mutated downstream.
        self.pre_block_logic(copy.deepcopy(candidate))
    except Exception as e:
        logger.warning("Exception in solutionEvent")
        logger.exception(e)
def get_valid_peers(peer_ips, peer_ip, public_port):
    """Filter candidate peer addresses, keeping only parseable ones.

    The sender's own address (peer_ip:public_port) is appended to the
    candidate list before filtering.
    """
    candidates = list(peer_ips)
    candidates.append("{0}:{1}".format(peer_ip, public_port))
    valid = set()
    for candidate in candidates:
        try:
            parse_peer_addr(candidate, True)
        except Exception as _:
            logger.warning("Invalid Peer Address {} sent by {}".format(candidate, peer_ip))
        else:
            valid.add(candidate)
    return valid
def check_stale_txn(self, state, current_block_number):
    """Re-validate and re-broadcast transactions that have gone stale in the pool.

    Stale transactions that fail validation are evicted; valid ones get their
    block number refreshed and are re-broadcast.
    """
    i = 0
    while i < len(self.transaction_pool):
        tx_info = self.transaction_pool[i][1]
        if tx_info.is_stale(current_block_number):
            if not tx_info.validate(state):
                logger.warning('Txn validation failed for tx in tx_pool')
                self.remove_tx_from_pool(tx_info.transaction)
                # Deliberately not advancing i: removal shifted the pool left,
                # so index i now holds the next entry.
                continue
            tx_info.update_block_number(current_block_number)
            self.broadcast_tx(tx_info.transaction)
        i += 1
def commit(self, state_loader, batch=None):
    """Move all cached address entries from this loader's namespace into state_loader's.

    Each address value is copied under the destination state_code, recorded in
    the destination loader, and deleted from this namespace; the (now empty)
    address list is then persisted.
    """
    # TODO (cyyber): Optimization, instead of moving from current to headerhash,
    # blocknumber could be used in state_code, and current could point to cache of
    # latest blocknumber
    for address in self._data.addresses:
        data = self._db.get_raw(self.state_code + address)
        if data is None:
            # NOTE(review): data is still written below even when None —
            # confirm put_raw tolerates a None value, or whether this entry
            # should be skipped.
            logger.warning('>>>>>>>>> GOT NONE <<<<<<< %s', address)
        self._db.put_raw(state_loader.state_code + address, data, batch)
        self.add_address(address)
        self._db.delete(self.state_code + address, batch)
    del self._data.addresses[:]
    self._db.put_raw(b'state' + self.state_code, MessageToJson(self._data).encode(), batch)
def handle_chain_state(self, source, message: qrllegacy_pb2.LegacyMessage):
    """Record a peer's chain-state report; drop peers sending malformed difficulty."""
    P2PBaseObserver._validate_message(message, qrllegacy_pb2.LegacyMessage.CHAINSTATE)
    message.chainStateData.timestamp = ntp.getTime()  # Receiving time
    try:
        # Sanity-check the reported cumulative difficulty before trusting it.
        UInt256ToString(message.chainStateData.cumulative_difficulty)
    except ValueError:
        logger.warning('Invalid Cumulative Difficulty sent by peer')
        source.loseConnection()
    else:
        self._peer_node_status[source] = message.chainStateData
def connect_peer(self, full_address):
    """Open a TCP connection to full_address unless a connection to it already exists."""
    try:
        addr = IPMetadata.from_full_address(full_address)
        already_connected = addr.full_address in self.get_connected_peer_addrs()
        if not already_connected:
            reactor.connectTCP(addr.ip, addr.port, self)
    except Exception as e:
        logger.warning("Could not connect to %s - %s", full_address, str(e))
def put_tx_metadata(state: State, txn: Transaction, block_number: int, timestamp: int, batch) -> bool:
    """Persist transaction metadata keyed by txhash.

    :return: True on success, False when the write fails (failure is logged).
    """
    try:
        metadata = TransactionMetadata.create(tx=txn,
                                              block_number=block_number,
                                              timestamp=timestamp)
        state._db.put_raw(txn.txhash, metadata.serialize(), batch)
        return True
    except Exception:
        logger.warning("Error writing tx metadata")
        return False
def _parse_buffer(self, total_read):
    # FIXME: This parsing/wire protocol needs to be replaced
    """Generator: parse length-prefixed LegacyMessages out of the receive buffer.

    Frames are a 4-byte big-endian size followed by a protobuf payload.
    Malformed frames ban the peer channel; incomplete frames wait for more
    data. Consumed byte counts accumulate into total_read[0].

    >>> from pyqrllib.pyqrllib import hstr2bin
    >>> p=P2PProtocol()
    >>> p._buffer = bytes(hstr2bin('000000191a170a0776657273696f6e120c67656e657369735f68617368'+ \
                                   '000000191a170a0776657273696f6e120c67656e657369735f68617368'))
    >>> messages = p._parse_buffer([0])
    >>> len(list(messages))
    2
    """
    chunk_size = 0
    while self._buffer:
        # Need the 4-byte size prefix plus at least one payload byte.
        if len(self._buffer) < 5:
            # Buffer is still incomplete as it doesn't have message size
            return
        ignore_skip = False
        try:
            chunk_size_raw = self._buffer[:4]
            chunk_size = struct.unpack('>L', chunk_size_raw)[0]  # is m length encoded correctly?
            if chunk_size <= 0:
                logger.debug("<X< %s", bin2hstr(self._buffer))
                raise Exception("Invalid chunk size <= 0")
            if chunk_size > config.dev.message_buffer_size:
                raise Exception("Invalid chunk size > message_buffer_size")
            if len(self._buffer) - 4 < chunk_size:  # As 4 bytes includes chunk_size_raw
                ignore_skip = True  # Buffer is still incomplete as it doesn't have message
                return
            message_raw = self._buffer[4:4 + chunk_size]
            message = qrllegacy_pb2.LegacyMessage()
            message.ParseFromString(message_raw)
            yield message
        except Exception as e:  # no qa
            logger.warning("Problem parsing message. Banning+Dropping connection")
            logger.exception(e)
            self.peer_manager.ban_channel(self)
        finally:
            if not ignore_skip:
                # Advance past the frame (header + payload) unless we bailed
                # out waiting for more data.
                skip = 4 + chunk_size
                self._buffer = self._buffer[skip:]
                total_read[0] += skip
def get_ntp_response():
    """Query the configured NTP servers round-robin; exit the process when all retries fail."""
    for attempt in range(NTP_RETRIES):
        server = config.user.ntp_servers[attempt % len(config.user.ntp_servers)]
        try:
            response = NTPClient().request(server, version=NTP_VERSION)
        except Exception as e:
            logger.warning(e)
        else:
            return response
    # FIXME: Provide some proper clean before exiting
    logger.fatal("Could not contact NTP servers after %d retries", NTP_RETRIES)
    sys.exit(-1)
def get_ntp_response():
    """Try each NTP server in rotation until one answers; exit the process if none do."""
    for attempt in range(NTP_RETRIES):
        server = ntp_servers[attempt % len(ntp_servers)]
        try:
            client = NTPClient()
            return client.request(server, version=NTP_VERSION)
        except Exception as e:
            logger.warning(e)
    # FIXME: Provide some proper clean before exiting
    logger.fatal("Could not contact NTP servers after %d retries", NTP_RETRIES)
    sys.exit(-1)
def add_tx_from_block_to_pool(self, block: Block, current_block_number):
    """
    Move all transactions from block to transaction pool.

    Stops at the first transaction the pool rejects; the coinbase
    transaction (index 0) is never moved.

    :param block:
    :return:
    """
    for protobuf_tx in block.transactions[1:]:
        tx = Transaction.from_pbdata(protobuf_tx)
        if self.add_tx_to_pool(tx, current_block_number):
            continue
        logger.warning('Failed to Add transaction into transaction pool')
        logger.warning('Block #%s %s', block.block_number, bin2hstr(block.headerhash))
        return
def handle_message_received(self, source, message: qrllegacy_pb2.LegacyMessage):
    """
    Message Receipt
    This function accepts message receipt from peer, checks if the message
    hash already been received or not. In case its a already received
    message, it is ignored. Otherwise the request is made to get the full
    message.
    :return:
    """
    mr_data = message.mrData
    msg_hash = mr_data.hash
    # FIXME: Separate into respective message handlers
    if mr_data.type not in MessageReceipt.allowed_types:
        return
    # Ignore transactions while the node is still syncing.
    if mr_data.type == qrllegacy_pb2.LegacyMessage.TX and source.factory.sync_state.state != ESyncState.synced:
        return
    if mr_data.type == qrllegacy_pb2.LegacyMessage.TX:
        if len(source.factory._chain_manager.tx_pool.pending_tx_pool) >= config.dev.transaction_pool_size:
            logger.warning('TX pool size full, incoming tx dropped. mr hash: %s', bin2hstr(msg_hash))
            return
    if mr_data.type == qrllegacy_pb2.LegacyMessage.BK:
        # Reject blocks too far ahead of, or too far behind, our chain height.
        if mr_data.block_number > source.factory.chain_height + config.dev.max_margin_block_number:
            logger.debug('Skipping block #%s as beyond lead limit', mr_data.block_number)
            return
        if mr_data.block_number < source.factory.chain_height - config.dev.reorg_limit:
            logger.debug('Skipping block #%s as beyond re-org limit', mr_data.block_number)
            return
    if source.factory.master_mr.contains(msg_hash, mr_data.type):
        return
    source.factory.master_mr.add_peer(msg_hash, mr_data.type, source, mr_data)
    if source.factory.master_mr.is_callLater_active(msg_hash):  # Ignore if already requested
        return
    source.factory.request_full_message(mr_data)
def validate_slave(self, addr_from_state: AddressState, addr_from_pk_state: AddressState):
    """Check that the transaction's signing key may act for its master address.

    Rejects the degenerate case where master_addr equals the PK-derived
    address; for true slave usage the PK must be registered with an allowed
    access type.
    """
    addr_from_pk = bytes(QRLHelper.getAddress(self.PK))
    master_address = self._get_master_address()
    allowed_access_types = self._get_allowed_access_types()
    if self.master_addr == addr_from_pk:
        logger.warning('Matching master_addr field and address from PK')
        return False
    if addr_from_pk != master_address:
        # NOTE(review): membership is tested on addr_from_state but the access
        # type is read from addr_from_pk_state — confirm this asymmetry is
        # intended (the two could diverge).
        if str(self.PK) not in addr_from_state.slave_pks_access_type:
            logger.warning("Public key and address don't match")
            return False
        access_type = addr_from_pk_state.slave_pks_access_type[str(self.PK)]
        if access_type not in allowed_access_types:
            logger.warning('Access Type %s', access_type)
            logger.warning('Slave Address doesnt have sufficient permission')
            return False
    return True
def validate_slave(self, addr_from_state: AddressState, addr_from_pk_state: AddressState): addr_from_pk = bytes(QRLHelper.getAddress(self.PK)) # Validate Slave for CoinBase txn is no more required if isinstance(self, CoinBase): master_address = self.addr_to allowed_access_types = [0, 1] else: master_address = self.addr_from allowed_access_types = [0] if self.master_addr == addr_from_pk: logger.warning('Matching master_addr field and address from PK') return False if addr_from_pk != master_address: if str(self.PK) not in addr_from_state.slave_pks_access_type: logger.warning("Public key and address don't match") return False access_type = addr_from_pk_state.slave_pks_access_type[str( self.PK)] if access_type not in allowed_access_types: logger.warning('Access Type %s', access_type) logger.warning( 'Slave Address doesnt have sufficient permission') return False return True
def _validate_custom(self) -> bool: if len(self.message_hash) == 0: logger.warning('Message cannot be empty') return False if len(self.addr_to) > 0 and not (OptimizedAddressState.address_is_valid(self.addr_to)): logger.warning('[MessageTransaction] Invalid address addr_to: %s', bin2hstr(self.addr_to)) return False if self.fee < 0: logger.info('State validation failed for %s because: Negative send', bin2hstr(self.txhash)) return False return True
def _validate_extended(self, state_container: StateContainer):
    """Extended validation for MultiSigCreate: hard-fork gate, signatory limit, and fee funding.

    :param state_container: current chain state and dev config
    :return: True when all checks pass
    """
    if state_container.block_number < state_container.current_dev_config.hard_fork_heights[0]:
        logger.warning("[MultiSigCreate] Hard Fork Feature not yet activated")
        return False
    if len(self.signatories) > state_container.current_dev_config.transaction_multi_output_limit:
        logger.warning('[MultiSigCreate] Number of signatories exceeds max limit')
        logger.warning('Number of Signatories %s', len(self.signatories))
        logger.warning('Number of Weights %s', len(self.weights))
        return False
    tx_balance = state_container.addresses_state[self.addr_from].balance
    if tx_balance < self.fee:
        logger.info('State validation failed for %s because: Insufficient funds', bin2hstr(self.txhash))
        # Bug fix: the format string had three placeholders ('amount') but
        # only two arguments, which triggers a logging error; MultiSigCreate
        # has no amount, so the extra placeholder is dropped.
        logger.info('balance: %s, fee: %s', tx_balance, self.fee)
        return False
    return True
def _try_branch_add_block(self, block, batch, check_stale=True) -> (bool, bool):
    """
    This function returns list of bool types. The first bool represent
    if the block has been added successfully and the second bool represent
    the fork_flag, which becomes true when a block triggered into fork
    recovery.
    :param block:
    :param batch:
    :return: [Added successfully, fork_flag]
    """
    if self._last_block.headerhash == block.prev_headerhash:
        # Block extends the current mainchain tip: apply it directly.
        if not self._apply_block(block, batch):
            return False, False
    self._state.put_block(block, batch)
    last_block_metadata = self._state.get_block_metadata(self._last_block.headerhash)
    if last_block_metadata is None:
        logger.warning("Could not find log metadata for %s", bin2hstr(self._last_block.headerhash))
        return False, False
    last_block_difficulty = int(UInt256ToString(last_block_metadata.cumulative_difficulty))
    new_block_metadata = self._add_block_metadata(block.headerhash, block.timestamp, block.prev_headerhash, batch)
    new_block_difficulty = int(UInt256ToString(new_block_metadata.cumulative_difficulty))
    if new_block_difficulty > last_block_difficulty:
        if self._last_block.headerhash != block.prev_headerhash:
            # Heavier chain arriving on a fork: persist fork state and start recovery.
            fork_state = qrlstateinfo_pb2.ForkState(initiator_headerhash=block.headerhash)
            self._state.put_fork_state(fork_state, batch)
            self._state.write_batch(batch)
            return self._fork_recovery(block, fork_state), True
        self._update_chainstate(block, batch)
        if check_stale:
            self.tx_pool.check_stale_txn(self._state, block.block_number)
        self.trigger_miner = True
    return True, False
def monitor_connections(self):
    """Periodic watchdog: reschedule itself, then reconnect any configured peer that dropped."""
    reactor.callLater(180, self.monitor_connections)
    if not self._peer_connections:
        logger.warning('No Connected Peer Found')
        reactor.callLater(60, self._qrl_node.connect_peers)
        return
    connected_ips = {conn.peer_ip for conn in self._peer_connections}
    for ip in config.user.peer_list:
        if ip not in connected_ips:
            self.connect_peer(ip)
def combine_peer_lists(peer_ips, sender_full_addresses: List, check_global=False) -> Set[IPMetadata]:
    """Merge two peer-address collections into a set of canonical full addresses.

    Unparseable entries are logged and skipped.
    """
    answer = set()
    for item in list(peer_ips) + list(sender_full_addresses):
        try:
            answer.add(IPMetadata.canonical_full_address(item, check_global))
        except:  # noqa
            logger.warning("Invalid Peer Address {}".format(item))
    return answer
def handleEvent(self, event):
    # NOTE: This function usually runs in the context of a C++ thread
    """Miner event callback: when a solution arrives, stamp the nonce and submit a block copy."""
    try:
        if event.type != SOLUTION:
            return
        nonce = event.nonce
        self._mining_block.set_nonces(nonce, 0)
        logger.debug('Solution Found %s', nonce)
        logger.info('Block #%s nonce: %s', self._mining_block.block_number, nonce)
        logger.info('Hash Rate: %s H/s', self.hashRate())
        self.pre_block_logic(copy.deepcopy(self._mining_block))
    except Exception as e:
        logger.warning("Exception in solutionEvent")
        logger.exception(e)
def _read_wallet(self, filename) -> List[AddressItem]:
    """Load address items from a JSON wallet file.

    :return: list of AddressItem, empty when the file is missing or unreadable
    """
    if not os.path.isfile(filename):
        return []
    items = []
    try:
        with open(filename, "rb") as infile:
            raw = simplejson.loads(infile.read())
            items = [self._get_address_item_from_json(entry) for entry in raw]
    except Exception as e:
        logger.warning("ReadWallet: %s", e)
    return items
def monitor_connections(self):
    """Watchdog loop: reschedules itself and tops up connections to the configured peer list."""
    reactor.callLater(180, self.monitor_connections)
    if len(self._peer_connections) == 0:
        logger.warning('No Connected Peer Found')
        reactor.callLater(60, self._qrl_node.connect_peers)
        return
    currently_connected = set(p.peer_ip for p in self._peer_connections)
    missing = (ip for ip in config.user.peer_list if ip not in currently_connected)
    for ip in missing:
        self.connect_peer(ip)
def _try_branch_add_block(self, block, batch=None) -> bool:
    """Validate `block` against its parent and add it; switch to the heavier
    chain (via rollback/replay) when its cumulative difficulty exceeds the
    current mainchain's.

    :return: True when the block was accepted (on mainchain or a fork)
    """
    parent_block = self.state.get_block(block.prev_headerhash)
    if not block.validate_parent_child_relation(parent_block):
        logger.warning('Failed to validate blocks parent child relation')
        return False
    address_set = self.state.prepare_address_list(block)  # Prepare list for current block
    if self.last_block.headerhash == block.prev_headerhash:
        # Extends the mainchain tip: use the cheap mainchain state.
        address_txn = self.state.get_state_mainchain(address_set)
    else:
        # Fork block: reconstruct state along its own chain.
        address_txn = self.state.get_state(block.prev_headerhash, address_set)
    if self.validate_block(block, address_txn):
        self.state.put_block(block, None)
        self.add_block_metadata(block.headerhash, block.timestamp, block.prev_headerhash, None)
        last_block_metadata = self.state.get_block_metadata(self.last_block.headerhash)
        new_block_metadata = self.state.get_block_metadata(block.headerhash)
        last_block_difficulty = int(UInt256ToString(last_block_metadata.cumulative_difficulty))
        new_block_difficulty = int(UInt256ToString(new_block_metadata.cumulative_difficulty))
        self.trigger_miner = False
        if new_block_difficulty > last_block_difficulty:
            if self.last_block.headerhash != block.prev_headerhash:
                # Heavier fork: rewind to the fork point and replay.
                self.rollback(block)
                return True
            self.state.update_mainchain_state(address_txn, block.block_number, block.headerhash)
            self.last_block = block
            self._update_mainchain(block, batch)
            self.tx_pool.remove_tx_in_block_from_pool(block)
            self.state.update_mainchain_height(block.block_number, batch)
            self.state.update_tx_metadata(block, batch)
            self.trigger_miner = True
        return True
    return False
def validate(self) -> bool:
    """Validate the block: non-empty transactions, parseable coinbase, and a valid header."""
    if len(self.transactions) == 0:
        return False
    # Sum fees of all non-coinbase transactions.
    fee_reward = sum(self.transactions[i].fee for i in range(1, len(self.transactions)))
    try:
        coinbase_amount = Transaction.from_pbdata(self.transactions[0]).amount
    except Exception as e:
        logger.warning('Exception %s', e)
        return False
    return self.blockheader.validate(fee_reward, coinbase_amount)
def validate_extended(self, addr_from_state, addr_from_pk_state):
    """Extended CoinBase validation: slave authorization, coinbase source, address validity, OTS reuse."""
    if not self.validate_slave(addr_from_state, addr_from_pk_state):
        return False
    if self.addr_from != config.dev.coinbase_address:
        return False
    for address in (self.addr_from, self.addr_to):
        if not AddressState.address_is_valid(address):
            logger.warning('Invalid address addr_from: %s addr_to: %s', self.addr_from, self.addr_to)
            return False
    if addr_from_pk_state.ots_key_reuse(self.ots_key):
        logger.warning('CoinBase Txn: OTS Public key re-use detected %s', self.txhash)
        return False
    return True
def get_valid_peers(peer_ips, peer_ip, public_port):
    """Return the parseable subset of peer_ips, plus the sender's own address when its port is valid."""
    new_peers = set()
    for candidate in peer_ips:
        try:
            parse_peer_addr(candidate)
        except Exception as e:
            logger.warning("Invalid Peer Address %s", candidate)
            logger.warning("Sent by %s", peer_ip)
            logger.exception(e)
        else:
            new_peers.add(candidate)
    if 0 < public_port <= 65535:
        new_peers.add("{0}:{1}".format(peer_ip, public_port))
    return new_peers
def validate_parent_child_relation(self, parent_block):
    """Verify this block directly extends parent_block: sequential number, hash link, timestamp order."""
    numbers_in_sequence = parent_block.block_number == self.block_number - 1
    if not numbers_in_sequence:
        logger.warning('Block numbers out of sequence: failed validation')
        return False
    hashes_linked = parent_block.headerhash == self.prev_blockheaderhash
    if not hashes_linked:
        logger.warning('Headerhash not in sequence: failed validation')
        return False
    if not self.timestamp >= parent_block.timestamp:
        logger.warning('BLOCK timestamp is less than prev block timestamp')
        logger.warning('block timestamp %s ', self.timestamp)
        logger.warning('must be greater than or equals to %s', parent_block.timestamp)
        return False
    return True
def validate(self) -> bool:
    """Return True when the block's fee total, coinbase amount, and header all validate."""
    if not self.transactions:
        return False
    total_fees = 0
    for tx in self.transactions[1:]:
        total_fees += tx.fee
    try:
        coinbase_amount = Transaction.from_pbdata(self.transactions[0]).amount
    except Exception as e:
        logger.warning('Exception %s', e)
        return False
    return self.blockheader.validate(total_fees, coinbase_amount)
def validate_parent_child_relation(self, parent_block):
    """Ensure parent_block is the immediate ancestor: sequential number, matching hash, non-decreasing timestamp."""
    if self.block_number - 1 != parent_block.block_number:
        logger.warning('Block numbers out of sequence: failed validation')
        return False
    if self.prev_blockheaderhash != parent_block.headerhash:
        logger.warning('Headerhash not in sequence: failed validation')
        return False
    if parent_block.timestamp > self.timestamp:
        logger.warning('BLOCK timestamp is less than prev block timestamp')
        logger.warning('block timestamp %s ', self.timestamp)
        logger.warning('must be greater than or equals to %s', parent_block.timestamp)
        return False
    return True
def start_download(self):
    # FIXME: Why PoW is downloading blocks?
    # add peers and their identity to requested list
    # FMBH
    """Transition the node into syncing and begin fetching blocks from peers.

    No-op when already synced; with zero connected peers there is nothing to
    download, so the node is marked synced instead.
    """
    if self.sync_state.state == ESyncState.synced:
        return
    logger.info('Checking Download..')
    if self.p2p_factory.connections != 0:
        self.update_node_state(ESyncState.syncing)
        logger.info('Initializing download from %s', self.chain_manager.height + 1)
        self.p2p_factory.randomize_block_fetch()
    else:
        logger.warning('No connected peers. Moving to synced state')
        self.update_node_state(ESyncState.synced)
def monitor_connections(self):
    """Recurring check that every configured peer has a live connection; reconnects missing ones."""
    reactor.callLater(config.user.monitor_connections_interval, self.monitor_connections)
    if not self._peer_connections:
        logger.warning('No Connected Peer Found')
        reactor.callLater(10, self._qrl_node.peer_manager.connect_peers)
        return
    connected = {conn.peer.full_address for conn in self._peer_connections}
    for peer_item in config.user.peer_list:
        full_address = IPMetadata.from_full_address(peer_item).full_address
        if full_address not in connected:
            self.connect_peer(full_address)
def validate(self) -> bool:
    """Run validate_or_raise and convert any failure into a False result.

    Exists so legacy callers get a boolean instead of exceptions.

    :return: True when the transaction is valid
    :rtype: bool
    """
    try:
        self.validate_or_raise()
        return True
    except ValueError as e:
        logger.info('[%s] failed validate_tx', bin2hstr(self.txhash))
        logger.warning(str(e))
    except Exception as e:
        logger.exception(e)
    return False
def main():
    """QRL node entry point.

    Order matters: the mining credit wallet is validated first, then data
    dir / NTP / chain state are initialized, services are started, and
    finally the Twisted reactor takes over the process.
    """
    args = parse_arguments()
    config.create_path(config.user.wallet_dir)
    mining_credit_wallet = get_mining_credit_wallet(args.mining_credit_wallet)
    if not mining_credit_wallet:
        logger.warning('Invalid Mining Credit Wallet Address')
        logger.warning('%s', args.mining_credit_wallet)
        return False
    logger.debug("=====================================================================================")
    logger.info("Data Path: %s", args.data_dir)
    config.user.data_dir = args.data_dir
    config.create_path(config.user.data_dir)
    ntp.setDrift()
    logger.info('Initializing chain..')
    persistent_state = State()
    chain_manager = ChainManager(state=persistent_state)
    chain_manager.load(Block.from_json(GenesisBlock().to_json()))
    qrlnode = QRLNode(db_state=persistent_state, mining_credit_wallet=mining_credit_wallet)
    qrlnode.set_chain_manager(chain_manager)
    set_logger(args, qrlnode.sync_state)
    #######
    # NOTE: Keep assigned to a variable or might get collected
    admin_service, grpc_service, mining_service = start_services(qrlnode)
    qrlnode.start_listening()
    qrlnode.connect_peers()
    qrlnode.start_pow(args.mining_thread_count)
    logger.info('QRL blockchain ledger %s', config.dev.version)
    logger.info('mining/staking address %s', args.mining_credit_wallet)
    # FIXME: This will be removed once we move away from Twisted
    reactor.run()
def get_last_txs(state: State):
    """Return the most recent transactions recorded under b'last_txn'.

    :return: list of Transaction, empty when the key is absent or unreadable
    """
    try:
        raw = state._db.get_raw(b'last_txn')
        last_txn = LastTransactions.deserialize(raw)
    except KeyError:
        return []
    except Exception as e:  # noqa
        logger.warning("[get_last_txs] Exception during call %s", e)
        return []
    return [Transaction.from_pbdata(md.transaction) for md in last_txn.tx_metadata]
def validate(self, verify_signature=True) -> bool:
    """Wrap validate_or_raise, logging failures and returning a boolean for legacy callers.

    :return: True when the transaction is valid
    :rtype: bool
    """
    try:
        self.validate_or_raise(verify_signature)
        return True
    except ValueError as e:
        logger.info('[%s] failed validate_tx', bin2hstr(self.txhash))
        logger.warning(str(e))
    except Exception as e:
        logger.exception(e)
    return False
def remove(self, address_state, value: bytes):
    """Remove *value* from the tail of this address's paginated storage.

    Decrements the per-address counter first, then verifies that the last
    stored entry on the resulting page really is *value* before deleting it.
    Raises if the stored tail does not match the expected value.
    """
    # Step the counter back before locating the page it now points at.
    address_state.update_counter_by_name(self.name, value=1, subtract=True)
    key = address_state.address
    count = address_state.get_counter_by_name(self.name)

    storage_key = self.generate_key(address_state.address, count)
    # Lazily load the page only if it is not already cached.
    if storage_key not in self.key_value:
        self.key_value[storage_key] = self.get_paginated_data(key, count)

    page = self.key_value[storage_key]
    if page[-1] != value:
        logger.warning("Expected value %s", page[-1])
        logger.warning("Found value %s", value)
        raise Exception("Unexpected value into storage")

    page.pop()
def start_download(self):
    """Kick off block download from peers unless the node is already synced."""
    # FIXME: Why PoW is downloading blocks?
    # add peers and their identity to requested list
    # FMBH
    if self.sync_state.state == ESyncState.synced:
        return

    logger.info('Checking Download..')

    peer_count = self.p2p_factory.connections
    if peer_count == 0:
        # Nobody to download from; treat ourselves as synced for now.
        logger.warning('No connected peers. Moving to synced state')
        self.update_node_state(ESyncState.synced)
        return

    self.update_node_state(ESyncState.syncing)
    logger.info('Initializing download from %s', self.chain_manager.height + 1)
    self.p2p_factory.randomize_block_fetch()
def dataReceived(self, data: bytes) -> None:
    """Twisted callback: buffer incoming bytes, dispatch parsed messages, ack.

    :param data: raw bytes received from the peer's transport

    Appends to the internal buffer, disconnects peers that exceed the
    configured buffer limit, notifies observers for every complete message
    parsed out of the buffer, and acknowledges the number of bytes consumed.
    """
    self._buffer += data
    total_read = len(self._buffer)

    if total_read > config.dev.max_bytes_out:
        logger.warning('Disconnecting peer %s', self.peer_ip)
        logger.warning('Buffer Size %s', len(self._buffer))
        self.loseConnection()
        # Bug fix: stop here. The original fell through and kept parsing the
        # oversized buffer (and could even send an ACK) after deciding to
        # drop the peer.
        return

    # read_bytes is a one-element list so _parse_buffer can report how many
    # bytes it consumed (an out-parameter).
    read_bytes = [0]
    for msg in self._parse_buffer(read_bytes):
        self._observable.notify(msg)

    if read_bytes[0]:
        p2p_ack = qrl_pb2.P2PAcknowledgement(bytes_processed=read_bytes[0])
        msg = qrllegacy_pb2.LegacyMessage(func_name=qrllegacy_pb2.LegacyMessage.P2P_ACK,
                                          p2pAckData=p2p_ack)
        self.send(msg)
def create(blocknumber: int,
           mining_nonce: int,
           PK: bytes,
           prev_blockheaderhash: bytes,
           hashedtransactions: bytes,
           fee_reward: int):
    """
    Create a block header based on the parameters

    >>> BlockHeader.create(blocknumber=1, mining_nonce=1, PK=b'publickey',
    ...                    prev_blockheaderhash=b'headerhash',
    ...                    hashedtransactions=b'some_data', fee_reward=1) is not None
    True
    >>> b=BlockHeader.create(blocknumber=1, mining_nonce=1, PK=b'publickey',
    ...                      prev_blockheaderhash=b'headerhash',
    ...                      hashedtransactions=b'some_data', fee_reward=1)
    >>> b.epoch
    0
    """
    bh = BlockHeader()
    bh._data.block_number = blocknumber
    bh._data.epoch = bh._data.block_number // config.dev.blocks_per_epoch

    # Genesis (block 0) keeps the default zero timestamp.
    if bh._data.block_number != 0:
        bh._data.timestamp.seconds = int(ntp.getTime())
        # Bug fix: the original compared the Timestamp *message* to 0
        # (`bh._data.timestamp == 0`), which is always False in proto3
        # Python, so an NTP failure was never detected. Compare the
        # seconds field instead.
        if bh._data.timestamp.seconds == 0:
            logger.warning('Failed to get NTP timestamp')
            return  # returns None: caller must treat a missing header as failure

    bh._data.hash_header_prev = prev_blockheaderhash
    bh._data.merkle_root = hashedtransactions
    bh._data.PK = PK
    bh._data.reward_fee = fee_reward

    bh._data.reward_block = 0
    if bh._data.block_number != 0:
        bh._data.reward_block = bh.block_reward_calc()

    bh.set_mining_nonce(mining_nonce)
    return bh
def validate_extended(self, addr_from_state, addr_from_pk_state, transaction_pool):
    """Stateful validation for a token-creation transaction.

    Checks slave permissions, fee sign, address validity of creator/owner and
    every initial-balance holder, sufficient funds, and OTS key reuse.
    Returns True only when all checks pass.
    """
    if not self.validate_slave(addr_from_state, addr_from_pk_state):
        return False

    tx_balance = addr_from_state.balance

    if self.fee < 0:
        logger.info('State validation failed for %s because: Negative send', self.txhash)
        return False

    # Every address touched by the token must be structurally valid.
    if not AddressState.address_is_valid(self.addr_from):
        logger.warning('Invalid address addr_from: %s', self.addr_from)
        return False
    if not AddressState.address_is_valid(self.owner):
        logger.warning('Invalid address owner_addr: %s', self.owner)
        return False
    for entry in self.initial_balances:
        if not AddressState.address_is_valid(entry.address):
            logger.warning('Invalid address address in initial_balances: %s', entry.address)
            return False

    if tx_balance < self.fee:
        logger.info('TokenTxn State validation failed for %s because: Insufficient funds', self.txhash)
        logger.info('balance: %s, Fee: %s', tx_balance, self.fee)
        return False

    if self.ots_key_reuse(addr_from_pk_state, self.ots_key):
        logger.info('TokenTxn State validation failed for %s because: OTS Public key re-use detected', self.txhash)
        return False

    return True
def start_mining(self, tx_pool, parent_block, parent_difficulty, thread_count=config.user.mining_thread_count):
    """Build a candidate block on top of *parent_block* and start PoW threads.

    Aborts quietly when no mining XMSS is available; any other failure is
    logged and swallowed so the node keeps running.
    """
    xmss = self.get_mining_xmss()
    if not xmss:
        logger.warning('No Mining XMSS Found')
        return

    try:
        # Stop any in-flight mining round before preparing a new one.
        self.cancel()
        self._mining_block = self.create_block(last_block=parent_block,
                                               mining_nonce=0,
                                               tx_pool=tx_pool,
                                               signing_xmss=self._mining_xmss,
                                               master_address=self._master_address)

        measurement = self.state.get_measurement(self._mining_block.timestamp,
                                                 self._mining_block.prev_headerhash)
        diff, target = self._difficulty_tracker.get(
            measurement=measurement,
            parent_difficulty=parent_difficulty)

        input_bytes, nonce_offset = self._get_mining_data(self._mining_block)

        logger.debug('!!! Mine #{} | {} ({}) | {} -> {} | {}'.format(
            self._mining_block.block_number,
            measurement,
            self._mining_block.timestamp - parent_block.timestamp,
            UInt256ToString(parent_difficulty),
            UInt256ToString(diff),
            target
        ))
        logger.debug('!!! {}'.format(target))

        self.start(input=input_bytes,
                   nonceOffset=nonce_offset,
                   target=target,
                   thread_count=thread_count)
    except Exception as e:
        logger.warning("Exception in start_mining")
        logger.exception(e)
def _try_branch_add_block(self, block, batch=None) -> bool:
    """Validate *block* against its parent's state and persist it.

    If the block extends a branch whose cumulative difficulty exceeds the
    current main chain, the main chain is switched to it (directly when it
    extends the tip, via rollback() otherwise). Returns True when the block
    was accepted (even if it did not become the new tip), False otherwise.

    NOTE(review): the sequence of state updates below is order-dependent;
    kept byte-identical on purpose.
    """
    parent_block = self.state.get_block(block.prev_headerhash)
    if not block.validate_parent_child_relation(parent_block):
        logger.warning('Failed to validate blocks parent child relation')
        return False

    address_set = self.state.prepare_address_list(block)  # Prepare list for current block
    # Extending the tip: read state straight from the main chain; otherwise
    # reconstruct the fork's state as of the parent block.
    if self.last_block.headerhash == block.prev_headerhash:
        address_txn = self.state.get_state_mainchain(address_set)
    else:
        address_txn = self.state.get_state(block.prev_headerhash, address_set)

    if self.validate_block(block, address_txn):
        self.state.put_block(block, None)
        self.add_block_metadata(block.headerhash, block.timestamp, block.prev_headerhash, None)

        # Compare cumulative difficulty of current tip vs. the new block.
        last_block_metadata = self.state.get_block_metadata(self.last_block.headerhash)
        new_block_metadata = self.state.get_block_metadata(block.headerhash)
        last_block_difficulty = int(UInt256ToString(last_block_metadata.cumulative_difficulty))
        new_block_difficulty = int(UInt256ToString(new_block_metadata.cumulative_difficulty))

        self.trigger_miner = False
        if new_block_difficulty > last_block_difficulty:
            if self.last_block.headerhash != block.prev_headerhash:
                # Heavier fork found: reorganize onto it.
                self.rollback(block)
                return True

            # Block extends the tip: apply its state and advance the chain.
            self.state.update_mainchain_state(address_txn, block.block_number, block.headerhash)
            self.last_block = block
            self._update_mainchain(block, batch)
            self.tx_pool.remove_tx_in_block_from_pool(block)
            self.state.update_mainchain_height(block.block_number, batch)
            self.state.update_tx_metadata(block, batch)
            self.trigger_miner = True

        return True

    return False
def validate_slave(self, addr_from_state, addr_from_pk_state):
    """Check that the signing public key may act for the master address.

    :param addr_from_state: state of the master address (where slave PKs
                            and their access types are registered)
    :param addr_from_pk_state: state of the address derived from the PK
    :return: True when the PK is the master itself or a registered slave
             with a sufficient access type
    """
    addr_from_pk = getAddress('Q', self.PK)

    # CoinBase pays *to* the master and slaves with access type 1 may sign it;
    # all other transactions originate *from* the master and need type 0.
    if isinstance(self, CoinBase):
        master_address = self.txto
        allowed_access_types = [0, 1]
    else:
        master_address = self.txfrom
        allowed_access_types = [0]

    if addr_from_pk.encode() != master_address:
        if str(self.PK) not in addr_from_state.slave_pks_access_type:
            logger.warning('Public key and address dont match')
            return False

        # Bug fix: read the access type from the same state object whose
        # membership was just checked. The original read
        # addr_from_pk_state.slave_pks_access_type here, which is a
        # different AddressState and could raise KeyError.
        access_type = addr_from_state.slave_pks_access_type[str(self.PK)]
        if access_type not in allowed_access_types:
            logger.warning('Access Type %s', access_type)
            logger.warning('Slave Address doesnt have sufficient permission')
            return False

    return True
def mining_wallet_checks(args):
    """Ensure a slave-key file exists for mining and return its contents.

    Optionally regenerates a random slave XMSS, otherwise reads the existing
    slaves file; when the file is missing, interactively offers to derive
    slaves from the main wallet seed. Exits the process on invalid input or
    user abort.

    :param args: parsed CLI arguments (uses args.randomizeSlaveXMSS)
    :return: the slaves structure [address, [seed], access_type?] read from disk
    """
    slaves_filename = os.path.join(config.user.wallet_dir, config.user.slaves_filename)

    if args.randomizeSlaveXMSS:
        # Generate a brand-new slave key and overwrite the slaves file.
        addrBundle = Wallet.get_new_address()
        slaves = [addrBundle.xmss.get_address(), [addrBundle.xmss.get_seed()], None]
        write_slaves(slaves_filename, slaves)

    try:
        slaves = read_slaves(slaves_filename)
    except FileNotFoundError:
        logger.warning('No Slave Seeds found!!')
        logger.warning('It is highly recommended to use the slave for mining')
        try:
            # Fall back to deriving a slave from the user's main wallet seed.
            ans = input('Do you want to use main wallet for mining? (Y/N) ')
            if ans == 'N':
                quit(0)
            seed = input('Enter hex or mnemonic seed of mining wallet ').encode()
        except KeyboardInterrupt:
            quit(0)

        if len(seed) == 96:  # hexseed
            bin_seed = hstr2bin(seed.decode())
        elif len(seed.split()) == 32:
            # 32 words -> mnemonic form of the seed
            bin_seed = mnemonic2bin(seed.decode())
        else:
            logger.warning('Invalid XMSS seed')
            quit(1)

        addrBundle = Wallet.get_new_address(seed=bin_seed)
        slaves = [addrBundle.xmss.get_address(), [addrBundle.xmss.get_seed()], None]
        write_slaves(slaves_filename, slaves)
        # Re-read so the returned structure matches exactly what is on disk.
        slaves = read_slaves(slaves_filename)
    except KeyboardInterrupt:
        quit(1)
    except Exception as e:
        logger.error('Exception %s', e)
        quit(1)

    return slaves
def validate_block(self, block, address_txn) -> bool:
    """Fully validate *block* and apply its transactions to *address_txn*.

    Checks the PoW nonce, the leading coinbase transaction, and then each
    remaining transaction (no extra coinbases, per-tx validation, nonce
    sequencing, OTS key reuse), mutating *address_txn* as each tx is applied.

    :param block: the Block to validate
    :param address_txn: dict of AddressState keyed by address; mutated in place
    :return: True when every check passes
    """
    len_transactions = len(block.transactions)
    if len_transactions < 1:
        return False

    coinbase_tx = Transaction.from_pbdata(block.transactions[0])
    # Fix: the original called coinbase_tx.validate() here and discarded the
    # result, then validated again below — the redundant discarded call is
    # removed.

    if not self.validate_mining_nonce(block):
        return False
    if coinbase_tx.subtype != qrl_pb2.Transaction.COINBASE:
        return False
    if not coinbase_tx.validate():
        return False

    coinbase_tx.apply_on_state(address_txn)

    addr_from_pk_state = address_txn[coinbase_tx.txto]
    addr_from_pk = Transaction.get_slave(coinbase_tx)
    if addr_from_pk:
        addr_from_pk_state = address_txn[addr_from_pk]

    if not coinbase_tx.validate_extended(address_txn[coinbase_tx.txto], addr_from_pk_state, []):
        return False

    # TODO: check block reward must be equal to coinbase amount

    for tx_idx in range(1, len_transactions):
        tx = Transaction.from_pbdata(block.transactions[tx_idx])

        # Only the first transaction may be a coinbase.
        if tx.subtype == qrl_pb2.Transaction.COINBASE:
            return False
        if not tx.validate():  # TODO: Move this validation, before adding txn to pool
            return False

        addr_from_pk_state = address_txn[tx.txfrom]
        addr_from_pk = Transaction.get_slave(tx)
        if addr_from_pk:
            addr_from_pk_state = address_txn[addr_from_pk]

        if not tx.validate_extended(address_txn[tx.txfrom], addr_from_pk_state, []):
            return False

        # Nonces must increase by exactly one per transaction and sender.
        expected_nonce = address_txn[tx.txfrom].nonce + 1
        if tx.nonce != expected_nonce:
            logger.warning('nonce incorrect, invalid tx')
            logger.warning('subtype: %s', tx.subtype)
            logger.warning('%s actual: %s expected: %s', tx.txfrom, tx.nonce, expected_nonce)
            return False

        if tx.ots_key_reuse(address_txn[tx.txfrom], tx.ots_key):
            logger.warning('pubkey reuse detected: invalid tx %s', tx.txhash)
            logger.warning('subtype: %s', tx.subtype)
            return False

        tx.apply_on_state(address_txn)

    return True
def create_block(self, last_block, mining_nonce, tx_pool, signing_xmss, master_address) -> Optional[Block]:
    """Assemble a new candidate block from the transaction pool.

    Drains tx_pool, drops transactions that are invalid against the current
    state (OTS reuse, insufficient balance/tokens), re-adds the survivors to
    the pool, and returns a Block built on top of *last_block*.

    :return: the assembled Block (never None in the current code path)
    """
    # TODO: Persistence will move to rocksdb
    # FIXME: Difference between this and create block?????????????
    # FIXME: Break encapsulation
    # The dummy block exists only to measure the empty-block size below.
    dummy_block = Block.create(mining_nonce=mining_nonce,
                               block_number=last_block.block_number + 1,
                               prevblock_headerhash=last_block.headerhash,
                               transactions=[],
                               signing_xmss=signing_xmss,
                               master_address=master_address,
                               nonce=0)
    dummy_block.set_mining_nonce(mining_nonce)
    # The dummy block consumed one XMSS signature; rewind the index.
    signing_xmss.set_index(signing_xmss.get_index() - 1)

    t_pool2 = copy.deepcopy(tx_pool.transaction_pool)
    del tx_pool.transaction_pool[:]
    ######

    # recreate the transaction pool as in the tx_hash_list, ordered by txhash..
    total_txn = len(t_pool2)
    txnum = 0

    # Collect every address touched by any pooled transaction.
    addresses_set = set()
    while txnum < total_txn:
        tx = t_pool2[txnum]
        tx.set_effected_address(addresses_set)
        txnum += 1

    addresses_state = dict()
    for address in addresses_set:
        addresses_state[address] = self.state.get_address(address)

    block_size = dummy_block.size
    block_size_limit = self.state.get_block_size_limit(last_block)

    txnum = 0
    while txnum < total_txn:
        tx = t_pool2[txnum]

        # Skip Transactions for later, which doesn't fit into block
        if block_size + tx.size + config.dev.tx_extra_overhead > block_size_limit:
            txnum += 1
            continue

        addr_from_pk_state = addresses_state[tx.txfrom]
        addr_from_pk = Transaction.get_slave(tx)
        if addr_from_pk:
            addr_from_pk_state = addresses_state[addr_from_pk]

        if tx.ots_key_reuse(addr_from_pk_state, tx.ots_key):
            del t_pool2[txnum]
            total_txn -= 1
            continue

        if tx.subtype == qrl_pb2.Transaction.TRANSFER:
            if addresses_state[tx.txfrom].balance < tx.amount + tx.fee:
                logger.warning('%s %s exceeds balance, invalid tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.txfrom].balance, tx.amount)
                del t_pool2[txnum]
                total_txn -= 1
                continue

        if tx.subtype == qrl_pb2.Transaction.MESSAGE:
            if addresses_state[tx.txfrom].balance < tx.fee:
                logger.warning('%s %s exceeds balance, invalid message tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Free %s',
                               addresses_state[tx.txfrom].balance, tx.fee)
                # Bug fix: the original decremented total_txn without removing
                # the invalid tx from t_pool2, leaving it to be included in the
                # block and desynchronizing the loop bookkeeping. Delete it
                # like every other rejection branch does.
                del t_pool2[txnum]
                total_txn -= 1
                continue

        if tx.subtype == qrl_pb2.Transaction.TOKEN:
            if addresses_state[tx.txfrom].balance < tx.fee:
                logger.warning('%s %s exceeds balance, invalid tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Fee %s',
                               addresses_state[tx.txfrom].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue

        if tx.subtype == qrl_pb2.Transaction.TRANSFERTOKEN:
            if addresses_state[tx.txfrom].balance < tx.fee:
                logger.warning('%s %s exceeds balance, invalid tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.txfrom].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue
            if bin2hstr(tx.token_txhash).encode() not in addresses_state[tx.txfrom].tokens:
                logger.warning('%s doesnt own any token with token_txnhash %s',
                               tx.txfrom, bin2hstr(tx.token_txhash).encode())
                del t_pool2[txnum]
                total_txn -= 1
                continue
            if addresses_state[tx.txfrom].tokens[bin2hstr(tx.token_txhash).encode()] < tx.amount:
                logger.warning('Token Transfer amount exceeds available token')
                logger.warning('Token Txhash %s', bin2hstr(tx.token_txhash).encode())
                logger.warning('Available Token Amount %s',
                               addresses_state[tx.txfrom].tokens[bin2hstr(tx.token_txhash).encode()])
                logger.warning('Transaction Amount %s', tx.amount)
                del t_pool2[txnum]
                total_txn -= 1
                continue

        if tx.subtype == qrl_pb2.Transaction.LATTICE:
            if addresses_state[tx.txfrom].balance < tx.fee:
                logger.warning('Lattice TXN %s %s exceeds balance, invalid tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.txfrom].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue

        if tx.subtype == qrl_pb2.Transaction.SLAVE:
            if addresses_state[tx.txfrom].balance < tx.fee:
                logger.warning('Slave TXN %s %s exceeds balance, invalid tx', tx, tx.txfrom)
                logger.warning('subtype: %s', tx.subtype)
                logger.warning('Buffer State Balance: %s Transfer Amount %s',
                               addresses_state[tx.txfrom].balance, tx.fee)
                del t_pool2[txnum]
                total_txn -= 1
                continue

        # Transaction survived all checks: apply it and keep it in the block.
        tx.apply_on_state(addresses_state)

        tx_pool.add_tx_to_pool(tx)
        tx._data.nonce = addresses_state[tx.txfrom].nonce
        txnum += 1
        block_size += tx.size + config.dev.tx_extra_overhead

    # Coinbase nonce comes from the in-memory state when the miner's address
    # was touched by this block, otherwise from persisted state.
    coinbase_nonce = self.state.get_address(signing_xmss.get_address()).nonce
    if signing_xmss.get_address() in addresses_state:
        coinbase_nonce = addresses_state[signing_xmss.get_address()].nonce + 1

    block = Block.create(mining_nonce=mining_nonce,
                         block_number=last_block.block_number + 1,
                         prevblock_headerhash=last_block.headerhash,
                         transactions=t_pool2,
                         signing_xmss=signing_xmss,
                         master_address=master_address,
                         nonce=coinbase_nonce)

    return block
def block_received(self, source, block: Block):
    """Handle a block delivered while syncing: verify provenance, add, advance.

    Rejects blocks from unexpected peers, out-of-sequence block numbers, or
    headerhashes that do not match the announced target chain, then adds the
    block and requests the next one unless syncing has finished.
    """
    self.pow.last_pb_time = time.time()
    logger.info('>>> Received Block #%d %s', block.block_number, bin2hstr(block.headerhash))

    # Only the peer we are currently syncing from may feed us blocks.
    if source != self._target_peer:
        logger.warning('Received block from unexpected peer')
        logger.warning('Expected peer: %s', self._target_peer.connection_id)
        logger.warning('Found peer: %s', source.connection_id)
        return

    if block.block_number != self._last_requested_block_idx:
        logger.warning('Did not match %s', self._last_requested_block_idx)
        return

    # The block's headerhash must match the target node's announced chain.
    first_idx = self._target_node_header_hash.block_number
    want_hash = self._target_node_header_hash.headerhashes[block.block_number - first_idx]
    if block.headerhash != want_hash:
        logger.warning('Did not match headerhash')
        logger.warning('Expected headerhash %s', want_hash)
        logger.warning('Found headerhash %s', block.headerhash)
        return

    # FIXME: This check should not be necessary
    if not self._chain_manager.add_block(block):
        logger.warning('Failed to Add Block')
        return

    try:
        reactor.download_monitor.cancel()
    except Exception as e:
        logger.warning("PB: %s", e)

    # Deliberately checked both before and after advancing the cursor.
    if self.is_syncing_finished():
        return
    self._last_requested_block_idx += 1
    if self.is_syncing_finished():
        return

    self.peer_fetch_block()