def prepare_next_unmined_block_template(self, mining_address, tx_pool, parent_block: Block, parent_difficulty, dev_config: DevConfig):
    miner = self.get_miner(parent_block.block_number + 1, dev_config)
    try:
        logger.debug('Miner-Try - prepare_next_unmined_block_template')
        with self.lock:
            logger.debug('Miner-Locked - prepare_next_unmined_block_template')
            logger.debug('Miner-TryCancel - prepare_next_unmined_block_template')
            miner.cancel()
            logger.debug('Miner-Cancel - prepare_next_unmined_block_template')

            self._mining_block = self.create_block(last_block=parent_block,
                                                   mining_nonce=0,
                                                   tx_pool=tx_pool,
                                                   miner_address=mining_address)

            parent_metadata = self._chain_manager.get_block_metadata(parent_block.headerhash)
            self._measurement = self._chain_manager.get_measurement(dev_config,
                                                                    self._mining_block.timestamp,
                                                                    self._mining_block.prev_headerhash,
                                                                    parent_metadata)

            self._current_difficulty, self._current_target = DifficultyTracker.get(
                measurement=self._measurement,
                parent_difficulty=parent_difficulty,
                dev_config=dev_config)
    except Exception as e:
        logger.warning("Exception in prepare_next_unmined_block_template")
        logger.exception(e)
def handle_message_transaction(self, source, message: qrllegacy_pb2.LegacyMessage):
    """
    Message Transaction
    This function processes whenever a Transaction having
    subtype MESSAGE is received.
    :return:
    """
    P2PBaseObserver._validate_message(message, qrllegacy_pb2.LegacyMessage.MT)
    try:
        tx = Transaction.from_pbdata(message.mtData)
    except Exception as e:
        logger.error('Message Txn rejected - unable to decode serialised data - closing connection')
        logger.exception(e)
        source.loseConnection()
        return

    if not source.factory.master_mr.isRequested(tx.get_message_hash(), source):
        return

    if tx.txhash in source.factory.buffered_chain.tx_pool.pending_tx_pool_hash:
        return

    source.factory.add_unprocessed_txn(tx, source.peer_ip)
def start_mining(self, mining_block: Block, current_target: bytes, dev_config: DevConfig):
    try:
        logger.debug('start_mining - TRY LOCK')
        with self.lock:
            logger.debug('start_mining - LOCKED')
            self.cancel()

            mining_blob = mining_block.mining_blob(dev_config)
            nonce_offset = mining_block.mining_nonce_offset(dev_config)

            seed_block = self.get_seed_block(mining_block.block_number)
            self._dev_config = dev_config
            self._mining_block = mining_block
            work_seq_id = self.start(mainHeight=mining_block.block_number,
                                     seedHeight=seed_block.block_number,
                                     seedHash=seed_block.headerhash,
                                     input=mining_blob,
                                     nonceOffset=nonce_offset,
                                     target=current_target,
                                     thread_count=self._mining_thread_count)

            logger.debug("MINING START [{}]".format(work_seq_id))
    except Exception as e:
        logger.warning("Exception in start_mining")
        logger.exception(e)

    logger.debug('start_mining - UNLOCKED')
def handle_transfer_token_transaction(source, message: qrllegacy_pb2.LegacyMessage):
    """
    Transfer Token Transaction
    This function processes whenever a Transaction having
    subtype TRANSFERTOKEN is received.
    :return:
    """
    P2PBaseObserver._validate_message(message, qrllegacy_pb2.LegacyMessage.TT)
    try:
        tx = Transaction.from_pbdata(message.ttData)
    except Exception as e:
        logger.error('Transfer Token Txn rejected - unable to decode serialised data - closing connection')
        logger.exception(e)
        source.loseConnection()
        return

    if not source.factory.master_mr.isRequested(tx.get_message_hash(), source):
        return

    source.factory.add_unprocessed_txn(tx, source.peer_ip)
def handleEvent(self, event):
    # NOTE: This function usually runs in the context of a C++ thread
    if event.type == SOLUTION:
        logger.debug('handleEvent - TRY LOCK')
        if not self.lock.acquire(blocking=False):
            logger.debug('handleEvent - SKIP')
            return False

        try:
            logger.debug('handleEvent - LOCKED')
            logger.debug('Solution Found %s', event.nonce)
            logger.info('Hash Rate: %s H/s', self.hashRate())
            cloned_block = copy.deepcopy(self._mining_block)
            cloned_block.set_nonces(self._dev_config, event.nonce, 0)
            logger.debug("Blob %s", cloned_block)
            logger.info('Block #%s nonce: %s', cloned_block.block_number, event.nonce)
            self._pre_block_logic(cloned_block)
        except Exception as e:
            logger.warning("Exception in solutionEvent")
            logger.exception(e)
        finally:
            logger.debug('handleEvent - UNLOCK')
            self.lock.release()

    return True
def handle_block(self, source, message: qrllegacy_pb2.LegacyMessage):  # block received
    """
    Block
    This function processes any new block received.
    :return:
    """
    P2PBaseObserver._validate_message(message, qrllegacy_pb2.LegacyMessage.BK)
    try:
        block = Block(message.block)
    except Exception as e:
        logger.error('block rejected - unable to decode serialised data %s', source.addr_remote)
        logger.exception(e)
        return

    logger.info('>>>Received block from %s %s %s',
                source.addr_remote,
                block.block_number,
                bin2hstr(block.headerhash))

    if not source.factory.master_mr.isRequested(block.headerhash, source, block):
        return

    source.factory.pow.pre_block_logic(block)  # FIXME: Ignores return value
    source.factory.master_mr.register(qrllegacy_pb2.LegacyMessage.BK, block.headerhash, message.block)
def handle_ephemeral(self, source, message: qrllegacy_pb2.LegacyMessage):
    """
    Receives Ephemeral Message
    :param source:
    :param message:
    :return:
    """
    try:
        encrypted_ephemeral = EncryptedEphemeralMessage(message.ephData)
    except Exception as e:
        logger.error('ephemeral_message rejected - unable to decode serialised data - closing connection')
        logger.exception(e)
        source.loseConnection()
        return

    if not source.factory.master_mr.isRequested(encrypted_ephemeral.get_message_hash(), self):
        return

    if not encrypted_ephemeral.validate():
        return

    source.factory.broadcast_ephemeral_message(encrypted_ephemeral)  # FIXME(cyyber) : Fix broken link
def start_mining(self, parent_block: Block, parent_difficulty):
    mining_xmss = self.get_mining_xmss()
    if not mining_xmss:
        logger.warning('No Mining XMSS Found')
        return

    try:
        self.cancel()

        mining_blob = self._mining_block.mining_blob
        nonce_offset = self._mining_block.mining_nonce_offset

        logger.debug('!!! Mine #{} | {} ({}) | {} -> {} | {}'.format(
            self._mining_block.block_number,
            self._measurement,
            self._mining_block.timestamp - parent_block.timestamp,
            UInt256ToString(parent_difficulty),
            UInt256ToString(self._current_difficulty),
            self._current_target))

        self.start(input=mining_blob,
                   nonceOffset=nonce_offset,
                   target=self._current_target,
                   thread_count=self._mining_thread_count)
    except Exception as e:
        logger.warning("Exception in start_mining")
        logger.exception(e)
def start_mining(self, parent_block: Block, parent_difficulty):
    try:
        logger.debug('start_mining - TRY LOCK')
        with self.lock:
            logger.debug('start_mining - LOCKED')
            self.cancel()

            mining_blob = self._mining_block.mining_blob
            nonce_offset = self._mining_block.mining_nonce_offset

            logger.debug('!!! Mine #{} | {} ({}) | {} -> {} | {} '.format(
                self._mining_block.block_number,
                self._measurement,
                self._mining_block.timestamp - parent_block.timestamp,
                UInt256ToString(parent_difficulty),
                UInt256ToString(self._current_difficulty),
                bin2hstr(bytearray(self._current_target))))
            logger.debug('!!! Mine #{} | blob: {}'.format(
                self._mining_block.block_number,
                bin2hstr(bytearray(mining_blob))))

            work_seq_id = self.start(input=mining_blob,
                                     nonceOffset=nonce_offset,
                                     target=self._current_target,
                                     thread_count=self._mining_thread_count)

            logger.debug("MINING START [{}]".format(work_seq_id))
    except Exception as e:
        logger.warning("Exception in start_mining")
        logger.exception(e)

    logger.debug('start_mining - UNLOCKED')
def handle_slave(self, source, message: qrllegacy_pb2.LegacyMessage):
    """
    Receives Slave Transaction
    :param message:
    :return:
    """
    P2PBaseObserver._validate_message(message, qrllegacy_pb2.LegacyMessage.SL)
    try:
        tx = Transaction.from_pbdata(message.slData)
    except Exception as e:
        logger.error('slave_txn rejected - unable to decode serialised data - closing connection')
        logger.exception(e)
        source.loseConnection()
        return

    if not source.factory.master_mr.isRequested(tx.get_message_hash(), source):
        return

    if not tx.validate():
        logger.warning('>>>Slave Txn %s invalid state validation failed..', tx.hash)
        return

    source.factory.add_unprocessed_txn(tx, source.peer_ip)
def prepare_next_unmined_block_template(self, tx_pool, parent_block: Block, parent_difficulty):
    mining_xmss = self.get_mining_xmss()
    if not mining_xmss:
        logger.warning('No Mining XMSS Found')
        return

    try:
        self.cancel()

        self._mining_block = self.create_block(last_block=parent_block,
                                               mining_nonce=0,
                                               tx_pool=tx_pool,
                                               signing_xmss=self._mining_xmss,
                                               master_address=self._master_address)

        parent_metadata = self.state.get_block_metadata(parent_block.headerhash)
        self._measurement = self.state.get_measurement(self._mining_block.timestamp,
                                                       self._mining_block.prev_headerhash,
                                                       parent_metadata)

        self._current_difficulty, self._current_target = DifficultyTracker.get(
            measurement=self._measurement,
            parent_difficulty=parent_difficulty)
    except Exception as e:
        logger.warning("Exception in prepare_next_unmined_block_template")
        logger.exception(e)
def _store(self):
    if self._filename is not None:
        try:
            with open(self._filename, 'w') as f:
                json.dump(self._data, f)
        except Exception as e:
            logger.error("not possible to save banned peers")
            logger.exception(e)
def notify(self, message):
    # FIXME: Add mutexes
    observers = self._observers.get(message.func_name, [])
    for o in observers:
        try:
            o(self.source, message)
        except Exception as e:
            logger.debug("[%s] executing %s", self.source, message)
            logger.exception(e)
def _parse_tx_object(source, message: qrllegacy_pb2.LegacyMessage, kind):
    tx = None
    try:
        tx = Transaction.from_pbdata(message.mtData)
    except Exception as e:
        logger.error('Message Txn rejected - unable to decode serialised data - closing connection')
        logger.exception(e)
        source.loseConnection()

    return tx
def _parse_buffer(self, total_read):
    # FIXME: This parsing/wire protocol needs to be replaced
    """
    >>> from pyqrllib.pyqrllib import hstr2bin
    >>> p=P2PProtocol()
    >>> p._buffer = bytes(hstr2bin('000000191a170a0776657273696f6e120c67656e657369735f68617368'+ \
                                   '000000191a170a0776657273696f6e120c67656e657369735f68617368'))
    >>> messages = p._parse_buffer([0])
    >>> len(list(messages))
    2
    """
    chunk_size = 0

    while self._buffer:
        if len(self._buffer) < 5:
            # Buffer is still incomplete as it doesn't have message size
            return

        ignore_skip = False

        try:
            chunk_size_raw = self._buffer[:4]
            chunk_size = struct.unpack('>L', chunk_size_raw)[0]  # is m length encoded correctly?

            if chunk_size <= 0:
                logger.debug("<X< %s", bin2hstr(self._buffer))
                raise Exception("Invalid chunk size <= 0")

            if chunk_size > config.dev.message_buffer_size:
                raise Exception("Invalid chunk size > message_buffer_size")

            if len(self._buffer) - 4 < chunk_size:  # As 4 bytes includes chunk_size_raw
                ignore_skip = True  # Buffer is still incomplete as it doesn't have message so skip moving buffer
                return

            message_raw = self._buffer[4:4 + chunk_size]
            message = qrllegacy_pb2.LegacyMessage()
            message.ParseFromString(message_raw)
            yield message

        except Exception as e:  # noqa
            logger.warning("Problem parsing message. Banning+Dropping connection")
            logger.exception(e)
            self.peer_manager.ban_channel(self)

        finally:
            if not ignore_skip:
                skip = 4 + chunk_size
                self._buffer = self._buffer[skip:]
                total_read[0] += skip
def get_txn_count(self, addr):
    try:
        return self._db.get((b'txn_count_' + addr))
    except KeyError:
        pass
    except Exception as e:  # FIXME: Review
        logger.error('Exception in get_txn_count')
        logger.exception(e)

    return 0
def get_ephemeral_metadata(self, msg_id: bytes):
    try:
        json_ephemeral_metadata = self._db.get_raw(b'ephemeral_' + msg_id)
        return EphemeralMetadata.from_json(json_ephemeral_metadata)
    except KeyError:
        pass
    except Exception as e:
        logger.exception(e)

    return EphemeralMetadata()
def address_used(self, address: bytes):  # FIXME: Probably obsolete
    try:
        return self._get_address_state(address)
    except KeyError:
        return False
    except Exception as e:  # FIXME: Review
        logger.error('Exception in address_used')
        logger.exception(e)
        raise
def address_used(self, address: bytes):  # FIXME: Probably obsolete
    try:
        return self.get_address_state(address)
    except KeyError:
        return False
    except Exception as e:  # FIXME: Review
        logger.error('Exception in address_used')
        logger.exception(e)
        raise
def get_txn_count(db, state_code, addr):
    try:
        return db.get(state_code + b'txn_count_' + addr)
    except KeyError:
        pass
    except Exception as e:  # FIXME: Review
        logger.error('Exception in get_txn_count')
        logger.exception(e)

    return 0
def get_txn_count(self, addr):
    try:
        return self._db.get(b'txn_count_' + addr)
    except KeyError:
        pass
    except Exception as e:  # FIXME: Review
        logger.error('Exception in get_txn_count')
        logger.exception(e)

    return 0
def wrap_f(caller_self, request, context):
    # Wraps a gRPC servicer method: ValueErrors become INVALID_ARGUMENT,
    # any other exception is logged, and an empty response is returned in both cases.
    try:
        return f(caller_self, request, context)
    except ValueError as e:
        self._set_context(context, e, StatusCode.INVALID_ARGUMENT)
        logger.info(str(e))
        return self.response_type()
    except Exception as e:
        self._set_context(context, e)
        logger.exception(e)
        return self.response_type()
def get_address_is_used(self, address: bytes) -> bool:  # FIXME: Probably obsolete
    try:
        return self._db.get_raw(address)
    except KeyError:
        return False
    except Exception as e:  # FIXME: Review
        logger.error('Exception in address_used')
        logger.exception(e)
        raise
def get_fork_state(self) -> Optional[qrlstateinfo_pb2.ForkState]:
    try:
        data = self._db.get_raw(b'fork_state')
        fork_state = qrlstateinfo_pb2.ForkState()
        fork_state.ParseFromString(bytes(data))
        return fork_state
    except KeyError:
        return None
    except Exception as e:
        logger.error('Exception in get_fork_state')
        logger.exception(e)
        raise
def solutionEvent(self, nonce):
    # NOTE: This function usually runs in the context of a C++ thread
    try:
        logger.debug('Solution Found %s', nonce)
        self._mining_block.set_nonces(nonce, 0)
        logger.info('Block #%s nonce: %s', self._mining_block.block_number,
                    StringToUInt256(str(nonce))[-4:])
        logger.info('Hash Rate: %s H/s', self.hashRate())
        cloned_block = copy.deepcopy(self._mining_block)
        self.pre_block_logic(cloned_block)
    except Exception as e:
        logger.warning("Exception in solutionEvent")
        logger.exception(e)
def solutionEvent(self, nonce):
    # NOTE: This function usually runs in the context of a C++ thread
    try:
        logger.debug('Solution Found %s', nonce)
        self._mining_block.set_mining_nonce(nonce)
        logger.info('Block #%s nonce: %s', self._mining_block.block_number,
                    StringToUInt256(str(nonce))[-4:])
        logger.info('Hash Rate: %s H/s', self.hashRate())
        cloned_block = copy.deepcopy(self._mining_block)
        self.pre_block_logic(cloned_block)
    except Exception as e:
        logger.warning("Exception in solutionEvent")
        logger.exception(e)
def get(self, key_obj):
    if not isinstance(key_obj, bytes):
        key_obj = key_obj.encode()

    value_obj = self.db.Get(key_obj)
    try:
        # FIXME: This is a massive bottleneck at start up.
        return json.loads(value_obj.decode())['value']
    except KeyError:
        logger.debug("Key not found %s", key_obj)
    except Exception as e:
        logger.exception(e)
def get_txn_count(self, addr):
    try:
        return int.from_bytes(self._db.get_raw(b'txn_count_' + addr),
                              byteorder='big',
                              signed=False)
    except KeyError:
        pass
    except Exception as e:  # FIXME: Review
        logger.error('Exception in get_txn_count')
        logger.exception(e)

    return 0
def get_paginated_data(self, key, count) -> list:
    storage_key = self.generate_key(key, count)
    try:
        pbData = self.db.get_raw(storage_key)
        data_list = qrl_pb2.DataList()
        data_list.ParseFromString(bytes(pbData))
        return list(data_list.values)
    except KeyError:
        return []
    except Exception as e:
        logger.error('[get_paginated_data] Exception for %s', self.name)
        logger.exception(e)
        raise
def notify(self, message, force_delivery=False):
    # FIXME: Add mutexes
    observers = self._observers.get(message.func_name, [])

    if force_delivery and not observers:
        raise RuntimeError("Observer not registered for: %s" % message.func_name)

    for o in observers:
        try:
            o(self.source, message)
        except Exception as e:
            logger.debug("[%s] executing %s", self.source, message)
            logger.exception(e)
def get_paginated_data(self, key, page):
    try:
        pbData = self.db.get_raw(self.name + b'_' + key + b'_' +
                                 page.to_bytes(8, byteorder='big', signed=False))
        data_list = qrl_pb2.DataList()
        data_list.ParseFromString(bytes(pbData))
        return list(data_list.values)
    except KeyError:
        return [b'\x00'] * config.dev.ots_bitfield_size
    except Exception as e:
        logger.error('[get_paginated_data] Exception for %s', self.name)
        logger.exception(e)
        raise
def handleEvent(self, event):
    # NOTE: This function usually runs in the context of a C++ thread
    try:
        if event.type == SOLUTION:
            nonce = event.nonce
            self._mining_block.set_nonces(nonce, 0)
            logger.debug('Solution Found %s', nonce)
            logger.info('Block #%s nonce: %s', self._mining_block.block_number, nonce)
            logger.info('Hash Rate: %s H/s', self.hashRate())
            cloned_block = copy.deepcopy(self._mining_block)
            self.pre_block_logic(cloned_block)
    except Exception as e:
        logger.warning("Exception in solutionEvent")
        logger.exception(e)
def get_valid_peers(peer_ips, peer_ip, public_port):
    # Keep only peer addresses that parse correctly; also add the sender's own
    # address when its advertised public port is in the valid range.
    new_peers = set()
    for ip_port in peer_ips:
        try:
            parse_peer_addr(ip_port)
            new_peers.add(ip_port)
        except Exception as e:
            logger.warning("Invalid Peer Address %s", ip_port)
            logger.warning("Sent by %s", peer_ip)
            logger.exception(e)

    if 0 < public_port <= 65535:
        new_peers.add("{0}:{1}".format(peer_ip, public_port))

    return new_peers
def validate(self) -> bool:
    """
    This method calls validate_or_raise, logs any failure and returns True or False accordingly
    The main purpose is to avoid exceptions and accommodate legacy code
    :return: True if the transaction is valid
    :rtype: bool
    """
    try:
        self.validate_or_raise()
    except ValueError as e:
        logger.info('[%s] failed validate_tx', bin2hstr(self.txhash))
        logger.warning(str(e))
        return False
    except Exception as e:
        logger.exception(e)
        return False
    return True
def handle_push_block(self, source, message: qrllegacy_pb2.LegacyMessage):
    """
    Push Block
    This function processes requested blocks received while syncing.
    Blocks received under this function are directly added to the main
    chain i.e. chain.blockchain
    It is expected to receive only one block for a given blocknumber.
    :return:
    """
    # FIXME: Later rename
    P2PBaseObserver._validate_message(message, qrllegacy_pb2.LegacyMessage.PB)
    if message.pbData is None:
        return

    try:
        block = Block(message.pbData.block)
        source.factory.block_received(source, block)
    except Exception as e:
        logger.error('block rejected - unable to decode serialised data %s', source.peer_ip)
        logger.exception(e)
def start_mining(self,
                 tx_pool,
                 parent_block,
                 parent_difficulty,
                 thread_count=config.user.mining_thread_count):
    mining_xmss = self.get_mining_xmss()
    if not mining_xmss:
        logger.warning('No Mining XMSS Found')
        return

    try:
        self.cancel()

        self._mining_block = self.create_block(last_block=parent_block,
                                               mining_nonce=0,
                                               tx_pool=tx_pool,
                                               signing_xmss=self._mining_xmss,
                                               master_address=self._master_address)

        measurement = self.state.get_measurement(self._mining_block.timestamp,
                                                 self._mining_block.prev_headerhash)

        current_difficulty, current_target = self._difficulty_tracker.get(
            measurement=measurement,
            parent_difficulty=parent_difficulty)

        input_bytes, nonce_offset = self._get_mining_data(self._mining_block)

        logger.debug('!!! Mine #{} | {} ({}) | {} -> {} | {}'.format(
            self._mining_block.block_number,
            measurement,
            self._mining_block.timestamp - parent_block.timestamp,
            UInt256ToString(parent_difficulty),
            UInt256ToString(current_difficulty),
            current_target))
        logger.debug('!!! {}'.format(current_target))

        self.start(input=input_bytes,
                   nonceOffset=nonce_offset,
                   target=current_target,
                   thread_count=thread_count)
    except Exception as e:
        logger.warning("Exception in start_mining")
        logger.exception(e)
def handle_block(self, source, message: qrllegacy_pb2.LegacyMessage): # block received """ Block This function processes any new block received. :return: """ P2PBaseObserver._validate_message(message, qrllegacy_pb2.LegacyMessage.BK) try: block = Block(message.block) except Exception as e: logger.error('block rejected - unable to decode serialised data %s', source.peer_ip) logger.exception(e) return logger.info('>>>Received block from %s %s %s', source.connection_id, block.block_number, bin2hstr(block.headerhash)) if not source.factory.master_mr.isRequested(block.headerhash, source, block): return source.factory.pow.pre_block_logic(block) # FIXME: Ignores return value source.factory.master_mr.register(qrllegacy_pb2.LegacyMessage.BK, block.headerhash, message.block)