def handle_block(self, source, message: xrdlegacy_pb2.LegacyMessage):  # block received
    """
    Process a newly received block (BK message).

    Decodes the serialized block; on decode failure the block is simply
    dropped (unlike the txn handlers, the peer connection is kept open).
    A decoded block is only acted upon if it was actually requested from
    this peer, then handed to the PoW logic and registered with the
    message relay.

    :param source: peer channel the message arrived on
    :param message: LegacyMessage of type BK carrying the raw block
    :return: None
    """
    P2PBaseObserver._validate_message(message, xrdlegacy_pb2.LegacyMessage.BK)
    try:
        block = Block(message.block)
    except Exception as e:
        # Bad block payload: log and drop, do not disconnect the peer.
        logger.error(
            'block rejected - unable to decode serialised data %s',
            source.peer)
        logger.exception(e)
        return

    logger.info('>>>Received block from %s %s %s',
                source.peer.full_address,
                block.block_number,
                bin2hstr(block.headerhash))

    # Ignore unsolicited blocks: only proceed if this header hash was requested.
    if not source.factory.master_mr.isRequested(block.headerhash, source, block):
        return

    source.factory.pow.pre_block_logic(block)  # FIXME: Ignores return value

    # Register the raw block with the message relay, presumably so it can be
    # served/propagated to other peers — TODO confirm against MessageReceipt.
    source.factory.master_mr.register(xrdlegacy_pb2.LegacyMessage.BK,
                                      block.headerhash,
                                      message.block)
def _store(self):
    """Persist the current data to the configured file as JSON.

    A ``None`` filename disables persistence entirely. Any write failure
    is logged and swallowed (best-effort save), never raised to the caller.
    """
    if self._filename is None:
        return
    try:
        with open(self._filename, 'w') as outfile:
            json.dump(self._data, outfile)
    except Exception as err:
        logger.error("not possible to save banned peers")
        logger.exception(err)
def get_state(state: State, key: bytes):
    """Load the ProposalRecord stored under *key*.

    A missing key or any read/deserialization failure is logged and a
    fresh, empty ProposalRecord is returned instead.
    """
    try:
        return ProposalRecord.deserialize(state._db.get_raw(key))
    except KeyError:
        logger.debug('[get_state] ProposalRecord not found')
    except Exception as e:
        logger.error('[get_state] %s', e)
    return ProposalRecord()
def get_dev_config_current_state_key(self):
    """Return the raw key of the current dev-config state, or None.

    A missing entry is logged at debug level; unexpected DB errors are
    logged and also mapped to None.
    """
    lookup_key = b'dev_config_current_state_key'
    try:
        return self._db.get_raw(lookup_key)
    except KeyError:
        logger.debug(
            '[get_dev_config_current_state_key] Dev Config not found')
    except Exception as e:
        logger.error('[get_dev_config_current_state_key] %s', e)
    return None
def get_token_metadata(state: State, token_txhash: bytes):
    """Return the TokenMetadata for *token_txhash*, or None.

    Missing entries are silent; any other failure is logged and mapped
    to None.
    """
    storage_key = b'token_' + token_txhash
    try:
        return TokenMetadata.deserialize(state._db.get_raw(storage_key))
    except KeyError:
        pass
    except Exception as e:
        logger.error('[get_token_metadata] %s', e)
    return None
def get_mainchain_height(self) -> int:
    """Return the persisted main-chain height, or -1 when unknown.

    The height is stored as an unsigned big-endian integer under the
    b'blockheight' key. A missing key and any unexpected error both
    yield -1 (the latter is logged).
    """
    try:
        raw_height = self._db.get_raw(b'blockheight')
        return int.from_bytes(raw_height, byteorder='big', signed=False)
    except KeyError:
        pass
    except Exception as e:
        logger.error('get_blockheight Exception %s', e)
    return -1
def get_re_org_limit(self) -> int:
    """Return the persisted re-org limit.

    Stored as an unsigned big-endian integer under b'reorg_limit'.
    Returns 0 when no limit was ever stored and -1 on unexpected DB
    errors (which are logged).
    """
    try:
        raw_limit = self._db.get_raw(b'reorg_limit')
        return int.from_bytes(raw_limit, byteorder='big', signed=False)
    except KeyError:
        return 0
    except Exception as e:
        logger.error('get_re_org_limit Exception %s', e)
    return -1
def get_address_is_used(self, address: bytes) -> bool:  # FIXME: Probably obsolete
    """Return True when *address* has an entry in the DB, False otherwise.

    Bug fix: the function is annotated ``-> bool`` but previously
    returned the raw stored bytes on a hit, so an empty stored value
    would have read as "unused". It now returns a real boolean based on
    key presence alone. Unexpected DB errors are logged and re-raised.

    :param address: raw address bytes used as the DB key
    :raises Exception: whatever the DB raises other than KeyError
    """
    try:
        self._db.get_raw(address)  # presence check; the stored value is irrelevant
        return True
    except KeyError:
        return False
    except Exception as e:  # FIXME: Review
        logger.error('Exception in address_used')
        logger.exception(e)
        raise
def get_block(state: State, header_hash: bytes):
    """Return the Block stored under *header_hash*, or None.

    A missing hash is logged at debug level; any decode or DB failure
    is logged as an error. Both cases yield None.
    """
    try:
        return Block.deserialize(state._db.get_raw(header_hash))
    except KeyError:
        logger.debug('[get_block] Block header_hash %s not found',
                     bin2hstr(header_hash).encode())
    except Exception as e:
        logger.error('[get_block] %s', e)
    return None
def get_state(state: State, shared_key):
    """Return the VoteStats stored for *shared_key*, or None.

    The record lives under the b'shared_key_' prefix. A missing entry is
    logged at debug level; other failures are logged as errors. Both
    yield None.
    """
    storage_key = b'shared_key_' + shared_key
    try:
        return VoteStats.deserialize(state._db.get_raw(storage_key))
    except KeyError:
        logger.debug('[get_state] VoteStats %s not found',
                     bin2hstr(shared_key).encode())
    except Exception as e:
        logger.error('[get_state] %s', e)
    return None
def get_fork_state(self) -> Optional[xrdstateinfo_pb2.ForkState]:
    """Return the persisted ForkState, or None when none was stored.

    Unlike the other getters, any failure besides a missing key is
    logged and re-raised: a corrupt fork state is not silently ignored.
    """
    try:
        fork_state = xrdstateinfo_pb2.ForkState()
        fork_state.ParseFromString(bytes(self._db.get_raw(b'fork_state')))
        return fork_state
    except KeyError:
        return None
    except Exception as e:
        logger.error('Exception in get_fork_state')
        logger.exception(e)
        raise
def get_dev_config_state(self, dev_config_state_key: bytes):
    """Return the DevConfig stored under *dev_config_state_key*, or None.

    A missing key is logged at debug level; parse/DB failures are logged
    as errors. Both cases yield None.
    """
    try:
        raw = self._db.get_raw(dev_config_state_key)
        dev_config = xrd_pb2.DevConfig()
        dev_config.ParseFromString(bytes(raw))
        return dev_config
    except KeyError:
        logger.debug('[get_dev_config_state] Dev Config not found')
    except Exception as e:
        logger.error('[get_dev_config_state] %s', e)
    return None
def get_block_number_mapping(state: State, block_number: int):
    """Return the BlockNumberMapping for *block_number*, or None.

    The mapping is stored under the decimal string of the block number.
    A missing entry is logged at debug level; other failures are logged
    as errors. Both yield None.
    """
    db_key = str(block_number).encode()
    try:
        raw = state._db.get_raw(db_key)
        mapping = xrd_pb2.BlockNumberMapping()
        return Parse(raw, mapping)
    except KeyError:
        logger.debug('[get_block_number_mapping] Block #%s not found',
                     block_number)
    except Exception as e:
        logger.error('[get_block_number_mapping] %s', e)
    return None
def get_paginated_data(self, key, count) -> list:
    """Return the stored page of values for (key, count), or [] when absent.

    Unexpected failures are logged with the paginator name and re-raised
    — only a genuinely missing page maps to an empty list.
    """
    storage_key = self.generate_key(key, count)
    try:
        raw = self.db.get_raw(storage_key)  # KeyError => page never stored
        page = xrd_pb2.DataList()
        page.ParseFromString(bytes(raw))
        return list(page.values)
    except KeyError:
        return []
    except Exception as e:
        logger.error('[get_paginated_data] Exception for %s', self.name)
        logger.exception(e)
        raise
def get_slave_pk_access_type(self, address: bytes, slave_pk: bytes) -> xrd_pb2.SlaveMetadata:
    """Return the SlaveMetadata for (address, slave_pk), or None.

    A missing slave entry is silent; parse/DB failures are logged and
    also mapped to None.
    """
    slave_key = self.generate_slave_key(address, slave_pk)
    try:
        metadata = xrd_pb2.SlaveMetadata()
        metadata.ParseFromString(self._db.get_raw(slave_key))
    except KeyError:
        pass
    except Exception as e:
        logger.error('[get_slave_pk_access_type] %s', e)
    else:
        return metadata
    return None
def get_paginated_data(self, key, page):
    """Return the stored page of values for (key, page).

    The storage key is ``name + b'_' + key + b'_' + page`` with the page
    number encoded as an unsigned 8-byte big-endian integer. A missing
    page yields a zeroed bitfield of ``config.dev.ots_bitfield_size``
    entries; any other failure is logged and re-raised.
    """
    try:
        page_suffix = page.to_bytes(8, byteorder='big', signed=False)
        raw = self.db.get_raw(self.name + b'_' + key + b'_' + page_suffix)
        data_list = xrd_pb2.DataList()
        data_list.ParseFromString(bytes(raw))
        return list(data_list.values)
    except KeyError:
        return [b'\x00'] * config.dev.ots_bitfield_size
    except Exception as e:
        logger.error('[get_paginated_data] Exception for %s', self.name)
        logger.exception(e)
        raise
def get_token(self, address: bytes, token_txhash: bytes) -> xrd_pb2.TokenBalance:
    """Return the TokenBalance of *address* for *token_txhash*, or None.

    A missing balance entry is silent; parse/DB failures are logged and
    also mapped to None.
    """
    try:
        balance = xrd_pb2.TokenBalance()
        token_key = self.generate_token_key(address, token_txhash)
        balance.ParseFromString(self._db.get_raw(token_key))
        return balance
    except KeyError:
        pass
    except Exception as e:
        logger.error('[get_token] %s', e)
    return None
def handle_multi_sig_vote(source, message: xrdlegacy_pb2.LegacyMessage):
    """
    Handle a Multi Sig Vote transaction message (MV).

    An undecodable payload is logged and the peer connection is closed.
    A decoded transaction is queued for processing only if its message
    hash was actually requested from this peer.
    :param source:
    :param message:
    :return:
    """
    P2PBaseObserver._validate_message(message, xrdlegacy_pb2.LegacyMessage.MV)
    try:
        vote_tx = Transaction.from_pbdata(message.mvData)
    except Exception as e:
        logger.error(
            'multi_sig_vote txn rejected - unable to decode serialised data - closing connection'
        )
        logger.exception(e)
        source.loseConnection()
        return

    if not source.factory.master_mr.isRequested(vote_tx.get_message_hash(), source):
        return
    source.factory.add_unprocessed_txn(vote_tx, source.peer.ip)
def handle_lattice(source, message: xrdlegacy_pb2.LegacyMessage):
    """
    Handle a Lattice Public Key transaction message (LT).

    An undecodable payload is logged and the peer connection is closed.
    A decoded transaction is queued for processing only if its message
    hash was actually requested from this peer.
    :param source:
    :param message:
    :return:
    """
    P2PBaseObserver._validate_message(message, xrdlegacy_pb2.LegacyMessage.LT)
    try:
        lattice_tx = Transaction.from_pbdata(message.ltData)
    except Exception as e:
        logger.error(
            'lattice_public_key rejected - unable to decode serialised data - closing connection'
        )
        logger.exception(e)
        source.loseConnection()
        return

    if not source.factory.master_mr.isRequested(lattice_tx.get_message_hash(), source):
        return
    source.factory.add_unprocessed_txn(lattice_tx, source.peer.ip)
def handle_token_transaction(source, message: xrdlegacy_pb2.LegacyMessage):
    """
    Handle a Token transaction message (TK).

    An undecodable payload is logged and the peer connection is closed.
    A decoded transaction is queued for processing only if its message
    hash was actually requested from this peer.
    :return:
    """
    P2PBaseObserver._validate_message(message, xrdlegacy_pb2.LegacyMessage.TK)
    try:
        token_tx = Transaction.from_pbdata(message.tkData)
    except Exception as e:
        logger.error(
            'Token Txn rejected - unable to decode serialised data - closing connection'
        )
        logger.exception(e)
        source.loseConnection()
        return

    if not source.factory.master_mr.isRequested(token_tx.get_message_hash(), source):
        return
    source.factory.add_unprocessed_txn(token_tx, source.peer.ip)
def handle_tx(source, message: xrdlegacy_pb2.LegacyMessage):
    """
    Handle a plain transaction message (TX).

    An undecodable payload is logged and the peer connection is closed.
    A decoded transaction is queued for processing only if its message
    hash was actually requested from this peer.
    :return:
    """
    P2PBaseObserver._validate_message(message, xrdlegacy_pb2.LegacyMessage.TX)
    try:
        msg_tx = Transaction.from_pbdata(message.txData)
    except Exception as e:
        logger.error(
            'Message Txn rejected - unable to decode serialised data - closing connection'
        )
        logger.exception(e)
        source.loseConnection()
        return

    # NOTE: Connects to MR
    if not source.factory.master_mr.isRequested(msg_tx.get_message_hash(), source):
        return
    source.factory.add_unprocessed_txn(msg_tx, source.peer.ip)
def validate(self, data):
    """Return True when every expected param matches the value in *data*.

    A key expected in params but absent from *data*, or an uninitialized
    params attribute, is logged and reported as a failed validation
    (falsy return).
    """
    try:
        return all(self.params[key] == data[key]
                   for key in self.params.keys())
    except KeyError as missing:
        logger.error('Params Keys %s', self.params.keys())
        logger.error('Data Keys %s', data.keys())
        logger.error('Key Not found %s ', missing)
    except AttributeError:
        logger.error(
            'MessageRequest.params was not initialized before calling validate()'
        )
    return False
def handle_push_block(self, source, message: xrdlegacy_pb2.LegacyMessage):
    """
    Push Block (PB) handler for blocks received while syncing.

    Blocks arriving here are handed directly to factory.block_received
    for addition to the main chain (chain.blockchain). Exactly one block
    is expected per block number. Decode or processing failures are
    logged and the block is dropped; the connection stays open.
    :return:
    """
    # FIXME: Later rename
    P2PBaseObserver._validate_message(message, xrdlegacy_pb2.LegacyMessage.PB)
    if message.pbData is None:
        return

    try:
        pushed_block = Block(message.pbData.block)
        source.factory.block_received(source, pushed_block)
    except Exception as e:
        logger.error(
            'block rejected - unable to decode serialised data %s',
            source.peer)
        logger.exception(e)
def _validate_extended(self, state_container: StateContainer):
    """Validate a MultiSigSpend transaction against current chain state.

    Checks, in order: hard-fork activation, output-count limit, presence
    and consistency of the spender and multi-sig address states, fee and
    total-amount affordability, expiry height, and signatory membership.

    Bug fix: the multi-sig insufficient-funds branch previously logged
    ``tx_balance`` (the spender's balance); it now logs the multi-sig
    address balance that was actually compared.

    :param state_container: snapshot of addresses_state / dev config for
                            the block being validated
    :return: True when the transaction is valid for this state
    """
    if state_container.block_number < state_container.current_dev_config.hard_fork_heights[0]:
        logger.warning("[MultiSigSpend] Hard Fork Feature not yet activated")
        return False
    if len(self.addrs_to) > state_container.current_dev_config.transaction_multi_output_limit:
        logger.warning('[MultiSigSpend] Number of addresses exceeds max limit')
        logger.warning('Number of addresses %s', len(self.addrs_to))
        logger.warning('Number of amounts %s', len(self.amounts))
        return False

    addr_from_state = state_container.addresses_state[self.addr_from]

    if self.multi_sig_address not in state_container.addresses_state:
        logger.error("[MultiSigSpend] Multi Sig address state not found in state_container %s",
                     self.multi_sig_address)
        return False
    multi_sig_address_state = state_container.addresses_state[self.multi_sig_address]
    block_number = state_container.block_number

    # Sanity: the state entries must actually belong to the addresses queried.
    if addr_from_state.address != self.addr_from:
        logger.error("[MultiSigSpend] Unexpected addr_from_state")
        logger.error("Expecting State for address %s, but got state for address %s",
                     bin2hstr(self.addr_from),
                     bin2hstr(addr_from_state.address))
        return False

    if multi_sig_address_state.address != self.multi_sig_address:
        logger.error("[MultiSigSpend] Unexpected multi sig address state")
        logger.error("Expecting State for address %s, but got state for address %s",
                     bin2hstr(self.multi_sig_address),
                     bin2hstr(multi_sig_address_state.address))
        return False

    tx_balance = addr_from_state.balance
    total_amount = self.total_amount

    # The spender pays the fee; the multi-sig address funds the outputs.
    if tx_balance < self.fee:
        logger.info('[MultiSigSpend] State validation failed for %s because: Insufficient funds',
                    bin2hstr(self.txhash))
        logger.info('address: %s, balance: %s, fee: %s',
                    bin2hstr(self.addr_from), tx_balance, self.fee)
        return False

    if multi_sig_address_state.balance < total_amount:
        logger.info('[MultiSigSpend] State validation failed for %s because: Insufficient funds',
                    bin2hstr(self.txhash))
        logger.info('address: %s, balance: %s, fee: %s',
                    bin2hstr(self.multi_sig_address),
                    multi_sig_address_state.balance, self.fee)
        return False

    # Multi Sig Spend considered to be expired after block having block number equals to
    # self.expiry_block_number gets added into the main chain
    if self.expiry_block_number <= block_number:
        logger.info('[MultiSigSpend] State validation failed for %s due to invalid expiry_block_number',
                    bin2hstr(self.txhash))
        logger.info('Chain Height: %s, Expiry Block Number: %s',
                    block_number, self.expiry_block_number)
        return False

    if self.addr_from not in multi_sig_address_state.signatories:
        logger.info('[MultiSigSpend] Address is not in the signatories list: %s',
                    bin2hstr(self.addr_from))
        return False

    return True
def reset_processor_flag_with_err(self, msg):
    """Log a txn-task failure and clear the processor-running flag.

    :param msg: error message/object to record
    """
    logger.error('Exception in txn task')
    logger.error('%s', msg)
    # Allow a new transaction-processor task to be scheduled.
    self._txn_processor_running = False
def create_block(self,
                 last_block,
                 mining_nonce,
                 tx_pool: TransactionPool,
                 miner_address) -> Optional[Block]:
    """Assemble a candidate block on top of *last_block*.

    Builds a dummy (empty) block first to establish the base block size,
    then greedily fills the block from the transaction pool, applying
    each transaction to a scratch state container so later transactions
    see earlier ones' effects. Returns None when updating/applying state
    fails irrecoverably (a failed revert), otherwise the new Block.

    :param last_block: tip the new block extends
    :param mining_nonce: nonce applied to the dummy block's header
    :param tx_pool: pool to pull candidate transactions from
    :param miner_address: coinbase/miner reward address
    """
    seed_block = self._chain_manager.get_block_by_number(
        self._qn.get_seed_height(last_block.block_number + 1))
    dev_config = self._chain_manager.get_config_by_block_number(
        block_number=last_block.block_number + 1)

    # Empty block used only to measure the size overhead of a block
    # with no transactions at this height.
    dummy_block = Block.create(dev_config=dev_config,
                               block_number=last_block.block_number + 1,
                               prev_headerhash=last_block.headerhash,
                               prev_timestamp=last_block.timestamp,
                               transactions=[],
                               miner_address=miner_address,
                               seed_height=seed_block.block_number,
                               seed_hash=seed_block.headerhash)
    dummy_block.set_nonces(dev_config, mining_nonce, 0)

    t_pool2 = tx_pool.transactions

    block_size = dummy_block.size
    block_size_limit = self._chain_manager.get_block_size_limit(
        last_block, dev_config)

    transactions = []
    state_container = self._chain_manager.new_state_container(
        set(), last_block.block_number, True, None)

    for tx_set in t_pool2:
        tx = tx_set[1].transaction

        # Skip Transactions for later, which doesn't fit into block
        if block_size + tx.size + dev_config.tx_extra_overhead > block_size_limit:
            continue

        if not self._chain_manager.update_state_container(
                tx, state_container):
            logger.error("[create_block] Error updating state_container")
            return None

        # Invalid txn: undo the container update and evict it from the pool.
        if not tx.validate_all(state_container, check_nonce=False):
            if not state_container.revert_update():
                return None
            tx_pool.remove_tx_from_pool(tx)
            continue

        # Application failure: undo, but keep the txn in the pool.
        if not self._chain_manager.apply_txn(tx, state_container):
            logger.error("[create_block] Failed to apply txn")
            if not state_container.revert_update():
                return None
            continue

        # Nonce comes from the signing account's state: the slave's
        # address state when the txn was signed by a slave key,
        # otherwise addr_from's.
        addr_from_pk_state = state_container.addresses_state[tx.addr_from]
        addr_from_pk = Transaction.get_slave(tx)
        if addr_from_pk:
            addr_from_pk_state = state_container.addresses_state[
                addr_from_pk]

        tx._data.nonce = addr_from_pk_state.nonce

        block_size += tx.size + dev_config.tx_extra_overhead
        transactions.append(tx)

    block = Block.create(dev_config=dev_config,
                         block_number=last_block.block_number + 1,
                         prev_headerhash=last_block.headerhash,
                         prev_timestamp=last_block.timestamp,
                         transactions=transactions,
                         miner_address=miner_address,
                         seed_height=seed_block.block_number,
                         seed_hash=seed_block.headerhash)

    return block