def put_epoch_seed(self, epoch_seed):
    """Persist the epoch seed under the 'epoch_seed' key.

    :param epoch_seed: seed value to store in the underlying database
    :return: True on success, False if the store raised
    """
    try:
        self._db.put('epoch_seed', epoch_seed)
    except Exception as e:  # FIXME: Review — broad catch kept for best-effort storage
        logger.exception(e)
        return False
    # Fixed: previously fell through returning None, which is falsy just like
    # the failure path; return True so callers can distinguish success.
    return True
def dataReceived(self, data):
    """Entry point for raw wallet-protocol input.

    Parses one command (optionally prefixed with b'json ' to request a
    JSON-encoded reply), executes it, writes the response to the client,
    then resets the output accumulator for the next command.

    :param data: raw bytes received from the client
    """
    self.factory.recn += 1
    self.isJSON = False
    if data.lower().startswith(b'json '):
        self.isJSON = True
        data = data[5:]

    try:
        if not self.parse_cmd(self.parse(data)):
            self.output['status'] = 1
            self.output['message'].write(
                ">>> Command not recognised. Use 'help' for details \r\n")
    except Exception as e:
        # Fixed: this previously caught only KeyboardInterrupt, so any real
        # command failure propagated instead of producing the error reply.
        self.output['message'] = StringIO()
        self.output['message'].write(
            'Unexpected Error\r\nReport to QRL Developers')
        logger.error('Unexpected Error WalletProtocol\n')
        logger.exception(e)

    self.output['message'] = self.output['message'].getvalue()

    try:
        if self.isJSON:
            # Fixed: Twisted transport.write() requires bytes; the JSON
            # reply was previously written as a str ('%s' % ...).
            self.transport.write(bytes(json.dumps(self.output), 'utf-8'))
        else:
            self.transport.write(
                bytes(str(self.output['message']), 'utf-8'))
    except Exception as e:
        logger.error(
            'Walletprotocol unexpected exception while sending msg to client')
        logger.exception(e)

    # fresh accumulator for the next command
    del self.output
    self.output = {'status': 1, 'keys': [], 'message': StringIO()}
def DT(self, data):
    """
    Duplicate Transaction

    Handles an incoming transaction of subtype DT: deserialize it,
    drop it if unrequested, already pooled or invalid, otherwise pool
    and re-broadcast it.
    :param data: JSON-serialised duplicate transaction from a peer
    :return:
    """
    try:
        duplicate_txn = Transaction.from_json(data)
    except Exception as e:
        logger.error('DT rejected')
        logger.exception(e)
        self.transport.loseConnection()
        return

    msg_hash = duplicate_txn.get_message_hash()
    tx_pool = self.factory.buffered_chain.tx_pool

    if not self.factory.master_mr.isRequested(msg_hash, self):
        return
    if msg_hash in tx_pool.duplicate_tx_pool:
        return

    # TODO: State validate for duplicate_txn is pending
    if not duplicate_txn.validate():
        logger.debug('>>>Invalid DT txn %s', bin2hstr(msg_hash))
        return

    tx_pool.add_tx_to_duplicate_pool(duplicate_txn)
    self.factory.register_and_broadcast('DT', msg_hash, duplicate_txn.to_json())
def VE(self, data=None):
    """
    Version

    With no data, sends this node's version and genesis_prev_headerhash
    to the peer. Otherwise parses the peer's version details; on a
    genesis_prev_headerhash mismatch or a parsing error the peer is
    disconnected.

    :param data: JSON-encoded version details from the peer, or None
    """
    if not data:
        version_details = {
            'version': config.dev.version_number,
            'genesis_prev_headerhash': config.dev.genesis_prev_headerhash
        }
        self.transport.write(
            self.wrap_message('VE', helper.json_encode(version_details)))
    else:
        try:
            data = helper.json_decode(data)
            self.version = str(data['version'])
            logger.info('%s version: %s | genesis prev_headerhash %s',
                        self.transport.getPeer().host,
                        data['version'],
                        data['genesis_prev_headerhash'])
            if data['genesis_prev_headerhash'] == config.dev.genesis_prev_headerhash:
                return
            logger.warning('%s genesis_prev_headerhash mismatch', self.identity)
            # Fixed: the original calls were missing the '%s' placeholder,
            # so the hash arguments were never rendered into the log output.
            logger.warning('Expected: %s', config.dev.genesis_prev_headerhash)
            logger.warning('Found: %s', data['genesis_prev_headerhash'])
        except Exception as e:
            logger.error('Peer Caused Exception %s', self.identity)
            logger.exception(e)
        # reached on mismatch or on a peer-caused exception
        self.transport.loseConnection()
    return
def VE(self, data=None):
    """
    Version handshake.

    Called with no data: reply with our version and
    genesis_prev_headerhash. Called with peer data: log the peer's
    version and drop the connection when its genesis_prev_headerhash
    does not match ours (or when parsing fails).
    :return:
    """
    if data:
        try:
            peer_details = json.loads(data)
            logger.info('%s version: %s | genesis prev_headerhash %s',
                        self.transport.getPeer().host,
                        peer_details['version'],
                        peer_details['genesis_prev_headerhash'])
            if peer_details['genesis_prev_headerhash'] == config.dev.genesis_prev_headerhash:
                return
            logger.warning('%s genesis_prev_headerhash mismatch', self.conn_identity)
            logger.warning('Expected: %s', config.dev.genesis_prev_headerhash)
            logger.warning('Found: %s', peer_details['genesis_prev_headerhash'])
        except Exception as e:
            logger.error('Peer Caused Exception %s', self.conn_identity)
            logger.exception(e)
        # mismatch or malformed data: disconnect the odd peer
        self.transport.loseConnection()
    else:
        version_details = {
            'version': config.dev.version,
            'genesis_prev_headerhash': config.dev.genesis_prev_headerhash
        }
        self.transport.write(
            self.wrap_message('VE', json.dumps(version_details)))
def DT(self, data):
    """
    Duplicate Transaction

    This function processes whenever a Transaction having
    subtype DT is received.

    :param data: JSON-serialised duplicate transaction from a peer
    :return:
    """
    try:
        duplicate_txn = DuplicateTransaction().json_to_transaction(data)
    except Exception as e:
        logger.error('DT rejected')
        logger.exception(e)
        self.transport.loseConnection()
        return

    if not self.factory.master_mr.isRequested(duplicate_txn.get_message_hash(), self):
        return

    if duplicate_txn.get_message_hash() in self.factory.chain.duplicate_tx_pool:
        return

    # Fixed: removed a dead local (tx_state) that fetched
    # block_chain_buffer.get_stxn_state() and never used the result.
    # TODO: State validate for duplicate_txn is pending
    if duplicate_txn.validate_tx():
        self.factory.chain.add_tx_to_duplicate_pool(duplicate_txn)
    else:
        logger.warning('>>>Invalid DT txn %s',
                       bin2hstr(duplicate_txn.get_message_hash()))
        return

    self.factory.register_and_broadcast('DT',
                                        duplicate_txn.get_message_hash(),
                                        duplicate_txn.to_json())
def PB(self, data):
    """Push Block: handle one block received from a peer during sync.

    Decodes the block, verifies it is the one we asked for, appends it
    to the main chain when it extends our height, then requests the next
    block.
    :param data: serialised block (or a no-more-blocks marker)
    """
    # record when we last saw a PB message (used elsewhere for sync timeouts
    # — presumably by the PoS loop; confirm against factory.pos)
    self.factory.pos.last_pb_time = time.time()
    try:
        if self.isNoMoreBlock(data):
            return
        block = Block.from_json(data)
        blocknumber = block.blockheader.blocknumber
        logger.info('>>> Received Block #%d', blocknumber)
        # ignore blocks we did not ask for
        if blocknumber != self.last_requested_blocknum:
            logger.warning('Did not match %s %s',
                           self.last_requested_blocknum, self.identity)
            return
        # only extend the chain when the block is beyond our current height
        if blocknumber > self.factory.chain.height():
            if not self.factory.chain.block_chain_buffer.add_block_mainchain(block):
                logger.warning('PB failed to add block to mainchain')
                return
        # stop the pending download-timeout timer, if any
        try:
            reactor.download_monitor.cancel()
        except Exception as e:
            logger.warning("PB: %s", e)
        # ask a (random) peer for the next block
        self.factory.pos.randomize_block_fetch(blocknumber + 1)
    except Exception as e:
        logger.error('block rejected - unable to decode serialised data %s',
                     self.transport.getPeer().host)
        logger.exception(e)
    return
def recv_tx(self, json_tx_obj):
    """Deserialize an incoming simple transaction, de-duplicate it, and
    broadcast it to peers once it enters the pending pool.

    :param json_tx_obj: JSON-serialised transaction received from a peer
    """
    try:
        tx = SimpleTransaction().json_to_transaction(json_tx_obj)
    except Exception as e:
        logger.info('tx rejected - unable to decode serialised data - closing connection')
        logger.exception(e)
        self.transport.loseConnection()
        return

    chain = self.factory.chain

    if not self.factory.master_mr.isRequested(tx.get_message_hash(), self):
        return
    if tx.txhash in chain.prev_txpool or tx.txhash in chain.pending_tx_pool_hash:
        return

    # rolling window of recently-seen tx hashes: drop oldest, record newest
    del chain.prev_txpool[0]
    chain.prev_txpool.append(tx.txhash)

    # duplicate tx already received, would mess up nonce..
    if any(tx.txhash == pooled.txhash for pooled in chain.transaction_pool):
        return

    chain.update_pending_tx_pool(tx, self)
    self.factory.master_mr.register(tx.get_message_hash(), json_tx_obj, 'TX')
    self.broadcast(tx.get_message_hash(), 'TX')
def _maintain_peers(self):
    """Background loop: periodically poll every tracked peer over gRPC.

    Peers flagged as needing a refresh get a GetKnownPeers request;
    the rest get a GetNodeState request. Responses are handled
    asynchronously by the registered callbacks. After each pass the
    loop sleeps REFRESH_CYCLE_SECS and recycles stale peers.
    Runs forever; any per-cycle exception is logged and the loop continues.
    """
    while True:
        try:
            for peer_metadata in self._all_peers():
                if peer_metadata.peers_needs_refresh:
                    f = peer_metadata.stub.GetKnownPeers.future(
                        qrl_pb2.GetKnownPeersReq(),
                        timeout=PeerManager.TIMEOUT_SECS)
                    # attach the peer so the callback knows who answered
                    f.pm = peer_metadata
                    f.add_done_callback(self._add_peers_callback)
                else:
                    f = peer_metadata.stub.GetNodeState.future(
                        qrl_pb2.GetNodeStateReq(),
                        timeout=PeerManager.TIMEOUT_SECS)
                    f.pm = peer_metadata
                    f.add_done_callback(self._update_state_callback)

            # FIXME: QRLNode should probably depend on this
            # collect the host part of each stable peer's address
            # (currently unused — the update call below is commented out)
            tmp = []
            for peer_metadata in self.stable_peers():
                addr = peer_metadata.conn_addr.split(':')[0]
                tmp.append(addr)
            # self.qrlnode.update_peer_addresses(tmp)

            sleep(self.REFRESH_CYCLE_SECS)
            self.recycle()
        except Exception as e:
            logger.exception(e)
def f_read_wallet(self):
    """Load the wallet address list from wallet.dat, creating the file
    with a fresh address when it does not exist.

    On a corrupted wallet file, repeatedly attempts recovery via
    recover_wallet(); exits the process if recovery fails.

    :return: unpickled address list
    """
    addr_list = []
    if not os.path.isfile(self.wallet_dat_filename):
        logger.info('Creating new wallet file... (this could take up to a minute)')
        seed = None
        # For AWS test only
        tmp_seed = self.retrieve_seed_from_mnemonic()
        if tmp_seed is not None:
            seed = tmp_seed
        addr_list.append(self.getnewaddress(SIGNATURE_SIZE, 'XMSS', SEED=seed))
        # Fixed: pickle requires a binary-mode file object in Python 3;
        # the file was previously opened in text mode ("a").
        with open(self.wallet_dat_filename, "ab") as myfile:
            # add in a new call to create random_otsmss
            pickle.dump(addr_list, myfile)

    while True:
        try:
            # Fixed: read in binary mode ('rb') to match pickle's requirement.
            with open(self.wallet_dat_filename, 'rb') as myfile:
                return pickle.load(myfile)
        except Exception as e:
            logger.warning('Wallet.dat corrupted')
            logger.exception(e)
            logger.warning('Trying to recover')
            if self.recover_wallet():
                continue
            logger.error('Failed to Recover Wallet')
            sys.exit()
def verify(suffix, peerIdentity, chain, randomize_headerhash_fetch):
    """Check a peer's headerhash reply against our chain to locate a fork root.

    Only acts when the reply matches an entry in the module-level
    pending_blocks registry for this peer. If the peer's headerhash for
    the block agrees with ours, the fork root is the next block and
    unfork() is invoked; otherwise we walk one block further back
    (within the current epoch) by requesting the previous headerhash.

    :param suffix: JSON payload with 'blocknumber' and 'headerhash'
    :param peerIdentity: identity of the replying peer
    :param chain: chain object used to look up our local block
    :param randomize_headerhash_fetch: callback to request an earlier headerhash
    """
    mini_block = json.loads(suffix)
    blocknumber = mini_block['blocknumber']
    if blocknumber in pending_blocks and pending_blocks[blocknumber][0] == peerIdentity:
        logger.info('Found in Fork Pending List')
        # cancel the pending timeout/callback stored at index 3, if still live
        try:
            pending_blocks[blocknumber][3].cancel()
        except Exception as e:
            logger.exception(e)
        del pending_blocks[blocknumber]
        if mini_block['headerhash'] == chain.m_get_block(
                blocknumber).blockheader.headerhash:
            # Matched so fork root is the block next to it
            unfork(blocknumber + 1, chain)
            return
        if blocknumber >= epoch_minimum_blocknumber:
            # still diverged: step one block further back and retry
            randomize_headerhash_fetch(blocknumber - 1)
        else:
            logger.info(
                '******Seems like chain has been forked in previous epoch... '
                'Manual intervention is required!!!!!******')
def get_lattice_public_key(self, address):
    """Return the set of lattice public keys stored for *address*.

    An address with no stored keys yields an empty set; any unexpected
    storage error is logged and reported as False.
    """
    key = b'lattice_' + address
    try:
        stored = self._db.get(key)
    except KeyError:
        return set()
    except Exception as e:
        logger.exception(e)
        return False
    return set(stored)
def get_ntp_response():
    """Query the configured NTP server once and return the response.

    Exits the process when the request fails, since clock sync is
    mandatory for the caller.

    :return: ntplib response object
    """
    try:
        ntp_client = NTPClient()
        response = ntp_client.request(ntp_server, version=version)
    except Exception as e:
        logger.exception(e)
        # Fixed: exit status was 0 (success) even though this is a fatal
        # failure; use 1 so supervisors and scripts can detect the error.
        sys.exit(1)
    return response
def get(self, key_obj):
    """Fetch *key_obj* from the db and return the decoded 'value' field.

    Returns None (after logging) when the payload lacks a 'value' entry
    or decoding fails.
    """
    raw = self.db.Get(key_obj)
    try:
        payload = json.loads(raw)
        return payload['value']
    except KeyError as e:
        logger.error("Key not found %s", key_obj)
        logger.exception(e)
    except Exception as e:
        logger.exception(e)
def PBB(self, data):
    """
    Push Block Buffer
    This function executes while syncing block from other peers.
    Blocks received by this function, directly added into
    chain.block_chain_buffer. So it is expected to receive multiple
    of blocks having same blocknumber.
    :param data: JSON payload mapping a blocknumber to candidate blocks
    :return:
    """
    # timestamp used elsewhere for sync-timeout bookkeeping
    self.factory.pos.last_pb_time = time.time()
    try:
        if self.isNoMoreBlock(data):
            return
        data = helper.json_decode(data)
        # payload is keyed by a single blocknumber string
        blocknumber = int(list(data.keys())[0].encode('ascii'))
        if blocknumber != self.last_requested_blocknum:
            logger.info('Blocknumber not found in pending_blocks %s %s',
                        blocknumber,
                        self.conn_identity)
            return
        # multiple candidate blocks may share the same blocknumber
        for jsonBlock in data[str(blocknumber)]:
            block = Block.from_json(json.dumps(jsonBlock))
            logger.info('>>>Received Block #%s', block.blockheader.blocknumber)
            status = self.factory.chain.block_chain_buffer.add_block(block)
            if type(status) == bool and not status:
                logger.info(
                    "[PBB] Failed to add block by add_block, re-requesting the block #%s",
                    blocknumber)
                logger.info('Skipping one block')
                continue
            # cancel the pending download timer for this block, if any
            try:
                reactor.download_block.cancel()
            except Exception:
                pass
            # Below code is to stop downloading, once we see that we reached to blocknumber that are in pending_blocks
            # This could be exploited by sybil node, to send blocks in pending_blocks in order to disrupt downloading
            # TODO: requires a better fix
            if len(self.factory.chain.block_chain_buffer.pending_blocks) > 0 and min(
                    self.factory.chain.block_chain_buffer.pending_blocks.keys()) == blocknumber:
                self.factory.chain.block_chain_buffer.process_pending_blocks()
                return
        self.factory.pos.randomize_block_fetch(blocknumber + 1)
    except Exception as e:
        logger.error('block rejected - unable to decode serialised data %s',
                     self.transport.getPeer().host)
        logger.exception(e)
    return
def stake_list_get(self):
    """Return the persisted stake list; an empty list when the key is
    absent or the read fails."""
    result = []
    try:
        result = self.db.get('stake_list')
    except KeyError:
        logger.warning('stake_list empty returning empty list')
    except Exception as e:
        logger.error('Exception in stake_list_get')
        logger.exception(e)
    return result
def next_stake_list_get(self):
    """Return the stored next_stake_list; missing or failed reads yield
    an empty list."""
    try:
        stored = self.db.get('next_stake_list')
    except KeyError:
        pass
    except Exception as e:
        logger.error('Exception in next_stake_list_get')
        logger.exception(e)
    else:
        return stored
    return []
def state_hrs(self, hrs):
    """Look up the value registered under human-readable string *hrs*;
    False when not found or on storage error."""
    key = 'hrs{}'.format(hrs)
    try:
        return self.db.get(key)
    except KeyError:
        pass
    except Exception as e:
        logger.error('Exception in state_hrs')
        logger.exception(e)
    return False
def state_address_used(self, address):
    """Return the stored state for *address*, or False when the address
    has no state entry (never used) or the lookup fails."""
    result = False
    try:
        result = self._get_address_state(address)
    except KeyError:
        pass  # if excepts then address does not exist..
    except Exception as e:
        logger.error('Exception in state_address_used')
        logger.exception(e)
    return result
def state_get_txn_count(self, addr):
    """Number of transactions recorded for *addr* (0 when unknown or on
    read failure)."""
    count = 0
    try:
        count = self.db.get('txn_count_' + addr)
    except KeyError:
        pass
    except Exception as e:
        logger.error('Exception in state_get_txn_count')
        logger.exception(e)
    return count
def state_get_address(self, addr):
    """Return the state triple [nonce, balance, pubhash_list] for *addr*;
    unknown addresses get the default state."""
    # default: zero nonce, 100 * 10**8 starting balance, no pubhashes
    default_state = [0, 100 * (10 ** 8), []]
    try:
        return self.db.get(addr.encode())
    except KeyError:
        return default_state
    except Exception as e:
        logger.error('Exception in state_get_address')
        logger.exception(e)
        return default_state
def state_balance(self, addr):
    """Balance stored for *addr*; unknown addresses fall back to the
    default starting balance."""
    default_balance = 100 * (10 ** 8)
    try:
        return self.db.get(addr.encode())[1]
    except KeyError:
        return default_balance
    except Exception as e:
        logger.error('Exception in state_balance')
        logger.exception(e)
        return default_balance
def state_nonce(self, addr):
    """Nonce stored for *addr* (0 for unknown addresses or on read
    failure)."""
    nonce = 0
    try:
        nonce = self.db.get(addr.encode())[0]
    except KeyError:
        pass
    except Exception as e:
        logger.error('Exception in state_nonce')
        logger.exception(e)
    return nonce
def stake_list_get(self):
    """Return the persisted stake list (empty list when the key is
    missing or the read fails)."""
    stake_list = []
    try:
        stake_list = self.db.get('stake_list'.encode())
    except KeyError:
        pass
    except Exception as e:
        logger.error('Exception in stake_list_get')
        logger.exception(e)
    return stake_list
def state_pubhash(self, addr):
    """List of public-key hashes stored for *addr* ([] when unknown or
    on read failure)."""
    pubhashes = []
    try:
        pubhashes = self.db.get(addr.encode())[2]
    except KeyError:
        pass
    except Exception as e:
        logger.error('Exception in state_pubhash')
        logger.exception(e)
    return pubhashes
def state_nonce(self, addr):
    """Nonce recorded for *addr*; 0 when the address has no state entry
    or the read fails."""
    nonce = 0
    try:
        nonce = self.db.get(addr)[0]
    except KeyError:
        logger.warning("state_nonce: state not found for %s", addr)
    except Exception as e:
        logger.error('Exception in state_nonce')
        logger.exception(e)
    return nonce
def state_get_address(self, addr):
    """State triple [nonce, balance, pubhash_list] for *addr*;
    [0, 0, []] when the address has no state or the read fails."""
    state = [0, 0, []]
    try:
        state = self.db.get(addr)
    except KeyError:
        logger.warning('state_get_address: No state found for %s', addr)
    except Exception as e:
        logger.error('Exception in state_get_address')
        logger.exception(e)
    return state
def state_get_txn_count(self, addr):
    """Transaction count for *addr* (0 when no record exists or the
    read fails)."""
    count = 0
    try:
        count = self.db.get('txn_count_' + addr)
    except KeyError:
        logger.warning('No txn count for %s', addr)
    except Exception as e:
        logger.error('Exception in state_get_txn_count')
        logger.exception(e)
    return count
def state_pubhash(self, addr):
    """Pubhash list stored for *addr* ([] when no state exists or the
    read fails)."""
    pubhashes = []
    try:
        pubhashes = self.db.get(addr)[2]
    except KeyError:
        logger.warning("state_pubhash: state not found for %s", addr)
    except Exception as e:
        logger.error('Exception in state_pubhash')
        logger.exception(e)
    return pubhashes
def state_hrs(self, hrs):
    """Resolve the human-readable string *hrs* to its stored value;
    False when unregistered or on read failure."""
    result = False
    try:
        result = self.db.get('hrs' + hrs)
    except KeyError:
        logger.warning("state_hrs: state not found for %s", hrs)
    except Exception as e:
        logger.error('Exception in state_hrs')
        logger.exception(e)
    return result