def dataReceived(self, data):
    self.factory.recn += 1
    self.isJSON = False
    if data.lower().startswith(b'json '):
        self.isJSON = True
        data = data[5:]

    try:
        if not self.parse_cmd(self.parse(data)):
            self.output['status'] = 1
            self.output['message'].write(">>> Command not recognised. Use 'help' for details \r\n")
    except Exception as e:  # catch-all: any command failure is reported back to the client
        self.output['message'] = StringIO()
        self.output['message'].write('Unexpected Error\r\nReport to QRL Developers')
        logger.error('Unexpected Error WalletProtocol\n')
        logger.exception(e)

    self.output['message'] = self.output['message'].getvalue()

    try:
        if self.isJSON:
            # the transport expects bytes, so encode the JSON payload
            self.transport.write(json.dumps(self.output).encode('utf-8'))
        else:
            self.transport.write(bytes(str(self.output['message']), 'utf-8'))
    except Exception as e:
        logger.error('Walletprotocol unexpected exception while sending msg to client')
        logger.exception(e)

    del self.output
    self.output = {'status': 1, 'keys': [], 'message': StringIO()}
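# A minimal client sketch for the protocol above (illustrative only; the
# host/port and the trailing line terminator are assumptions, not taken
# from the QRL source). Prefixing a command with b'json ' makes the
# wallet protocol reply with a JSON document instead of plain text.
import socket

HOST, PORT = '127.0.0.1', 2000  # hypothetical wallet service endpoint

with socket.create_connection((HOST, PORT)) as sock:
    sock.sendall(b'json help\r\n')
    print(sock.recv(4096).decode('utf-8'))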
def f_read_wallet(self):
    addr_list = []

    if not os.path.isfile(self.wallet_dat_filename):
        logger.info('Creating new wallet file... (this could take up to a minute)')
        seed = None

        # For AWS test only
        tmp_seed = self.retrieve_seed_from_mnemonic()
        if tmp_seed is not None:
            seed = tmp_seed

        addr_list.append(self.getnewaddress(SIGNATURE_SIZE, 'XMSS', SEED=seed))
        # pickle requires a binary-mode file handle
        with open(self.wallet_dat_filename, "wb") as myfile:
            # add in a new call to create random_otsmss
            pickle.dump(addr_list, myfile)

    while True:
        try:
            with open(self.wallet_dat_filename, 'rb') as myfile:
                return pickle.load(myfile)
        except Exception as e:
            logger.warning('Wallet.dat corrupted')
            logger.exception(e)
            logger.warning('Trying to recover')
            if self.recover_wallet():
                continue

            logger.error('Failed to Recover Wallet')
            sys.exit()
def DT(self, data):
    """
    Duplicate Transaction
    Called whenever a transaction of subtype DT is received.
    :return:
    """
    try:
        duplicate_txn = DuplicateTransaction().json_to_transaction(data)
    except Exception as e:
        logger.error('DT rejected')
        logger.exception(e)
        self.transport.loseConnection()
        return

    if not self.factory.master_mr.isRequested(duplicate_txn.get_message_hash(), self):
        return

    if duplicate_txn.get_message_hash() in self.factory.chain.duplicate_tx_pool:
        return

    tx_state = self.factory.chain.block_chain_buffer.get_stxn_state(
        blocknumber=self.factory.chain.block_chain_buffer.height() + 1,
        addr=duplicate_txn.coinbase1.txfrom)

    # TODO: State validate for duplicate_txn is pending
    if duplicate_txn.validate_tx():
        self.factory.chain.add_tx_to_duplicate_pool(duplicate_txn)
    else:
        logger.warning('>>>Invalid DT txn %s', bin2hstr(duplicate_txn.get_message_hash()))
        return

    self.factory.register_and_broadcast('DT', duplicate_txn.get_message_hash(), duplicate_txn.to_json())
def DT(self, data):
    """
    Duplicate Transaction
    Called whenever a transaction of subtype DT is received.
    :return:
    """
    try:
        duplicate_txn = Transaction.from_json(data)
    except Exception as e:
        logger.error('DT rejected')
        logger.exception(e)
        self.transport.loseConnection()
        return

    if not self.factory.master_mr.isRequested(duplicate_txn.get_message_hash(), self):
        return

    if duplicate_txn.get_message_hash() in self.factory.buffered_chain.tx_pool.duplicate_tx_pool:
        return

    # TODO: State validate for duplicate_txn is pending
    if duplicate_txn.validate():
        self.factory.buffered_chain.tx_pool.add_tx_to_duplicate_pool(duplicate_txn)
    else:
        logger.debug('>>>Invalid DT txn %s', bin2hstr(duplicate_txn.get_message_hash()))
        return

    self.factory.register_and_broadcast('DT', duplicate_txn.get_message_hash(), duplicate_txn.to_json())
def VE(self, data=None):
    if not data:
        version_details = {
            'version': config.dev.version_number,
            'genesis_prev_headerhash': config.dev.genesis_prev_headerhash
        }
        self.transport.write(self.wrap_message('VE', helper.json_encode(version_details)))
    else:
        try:
            data = helper.json_decode(data)
            self.version = str(data['version'])
            logger.info('%s version: %s | genesis prev_headerhash %s',
                        self.transport.getPeer().host,
                        data['version'],
                        data['genesis_prev_headerhash'])

            if data['genesis_prev_headerhash'] == config.dev.genesis_prev_headerhash:
                return

            logger.warning('%s genesis_prev_headerhash mismatch', self.identity)
            logger.warning('Expected: %s', config.dev.genesis_prev_headerhash)
            logger.warning('Found: %s', data['genesis_prev_headerhash'])
        except Exception as e:
            logger.error('Peer Caused Exception %s', self.identity)
            logger.exception(e)

        self.transport.loseConnection()
def PB(self, data):
    self.factory.pos.last_pb_time = time.time()
    try:
        if self.isNoMoreBlock(data):
            return

        block = Block.from_json(data)

        blocknumber = block.blockheader.blocknumber
        logger.info('>>> Received Block #%d', blocknumber)

        if blocknumber != self.last_requested_blocknum:
            logger.warning('Did not match %s %s', self.last_requested_blocknum, self.identity)
            return

        if blocknumber > self.factory.chain.height():
            if not self.factory.chain.block_chain_buffer.add_block_mainchain(block):
                logger.warning('PB failed to add block to mainchain')
                return

        try:
            reactor.download_monitor.cancel()
        except Exception as e:
            logger.warning("PB: %s", e)

        self.factory.pos.randomize_block_fetch(blocknumber + 1)
    except Exception as e:
        logger.error('block rejected - unable to decode serialised data %s', self.transport.getPeer().host)
        logger.exception(e)
def verify_root(pub, merkle_root, merkle_path):
    if not pub:
        return False
    if not merkle_root:
        return False
    if not merkle_path:
        return False

    if len(pub) == 256:  # then LDOTS, need to add this to correctly concat the pub->pubhash
        pub = [i for sub in pub for i in sub]

    pubhash = sha256(''.join(pub))

    if pubhash not in merkle_path[0]:
        logger.info('hashed public key not in merkle path')
        return False

    for x in range(len(merkle_path)):
        if len(merkle_path[x]) == 1:
            if ''.join(merkle_path[x]) == merkle_root:
                return True
            logger.info('root check failed')
            return False
        if x + 1 == len(merkle_path):
            # malformed path: the last level must contain only the root
            break
        if sha256(merkle_path[x][0] + merkle_path[x][1]) not in merkle_path[x + 1]:
            logger.error('path authentication error')
            return False

    return False
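# A worked example of the merkle_path layout verify_root expects
# (illustrative; this local sha256 helper is an assumption standing in
# for the project's own hashing function). The path is a list of levels,
# lowest first: each level is a pair of sibling hashes, the top level
# holds only the root, and each hashed pair must appear one level up.
import hashlib

def sha256(s: str) -> str:
    return hashlib.sha256(s.encode()).hexdigest()

h0 = sha256('leaf-0')
h1 = sha256('leaf-1')
root = sha256(h0 + h1)
merkle_path = [[h0, h1], [root]]

assert sha256(merkle_path[0][0] + merkle_path[0][1]) in merkle_path[1]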
def VE(self, data=None):
    """
    Version
    If data is None, sends this node's version and genesis_prev_headerhash.
    Otherwise processes the received data and, if the genesis_prev_headerhash
    does not match, disconnects the peer.
    :return:
    """
    if not data:
        version_details = {
            'version': config.dev.version,
            'genesis_prev_headerhash': config.dev.genesis_prev_headerhash
        }
        self.transport.write(self.wrap_message('VE', json.dumps(version_details)))
    else:
        try:
            data = json.loads(data)
            logger.info('%s version: %s | genesis prev_headerhash %s',
                        self.transport.getPeer().host,
                        data['version'],
                        data['genesis_prev_headerhash'])

            if data['genesis_prev_headerhash'] == config.dev.genesis_prev_headerhash:
                return

            logger.warning('%s genesis_prev_headerhash mismatch', self.conn_identity)
            logger.warning('Expected: %s', config.dev.genesis_prev_headerhash)
            logger.warning('Found: %s', data['genesis_prev_headerhash'])
        except Exception as e:
            logger.error('Peer Caused Exception %s', self.conn_identity)
            logger.exception(e)

        self.transport.loseConnection()
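# Illustrative shape of the 'VE' handshake payload (field values are
# placeholders, not real network constants): both peers exchange this
# JSON and drop the connection if genesis_prev_headerhash differs.
import json

version_details = {
    'version': '0.0.0',                    # placeholder
    'genesis_prev_headerhash': '00' * 32,  # placeholder hash
}
payload = json.dumps(version_details)
assert json.loads(payload)['genesis_prev_headerhash'] == '00' * 32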
def get(self, key_obj):
    value_obj = self.db.Get(key_obj)
    try:
        return json.loads(value_obj)['value']
    except KeyError as e:
        logger.error("Key not found %s", key_obj)
        logger.exception(e)
    except Exception as e:
        logger.exception(e)
def PBB(self, data):
    """
    Push Block Buffer
    This function executes while syncing blocks from other peers.
    Blocks received here are added directly into chain.block_chain_buffer,
    so multiple blocks sharing the same blocknumber may arrive.
    :return:
    """
    self.factory.pos.last_pb_time = time.time()
    try:
        if self.isNoMoreBlock(data):
            return

        data = helper.json_decode(data)
        blocknumber = int(list(data.keys())[0])

        if blocknumber != self.last_requested_blocknum:
            logger.info('Blocknumber not found in pending_blocks %s %s', blocknumber, self.conn_identity)
            return

        for jsonBlock in data[str(blocknumber)]:
            block = Block.from_json(json.dumps(jsonBlock))
            logger.info('>>>Received Block #%s', block.blockheader.blocknumber)

            # add_block may return a non-bool status; only an explicit False is a failure
            status = self.factory.chain.block_chain_buffer.add_block(block)
            if status is False:
                logger.info("[PBB] Failed to add block by add_block, re-requesting the block #%s", blocknumber)
                logger.info('Skipping one block')
                continue

        try:
            reactor.download_block.cancel()
        except Exception:
            pass

        # Stop downloading once we reach a blocknumber that is already in pending_blocks.
        # This could be exploited by a sybil node, sending blocks into pending_blocks to disrupt downloading.
        # TODO: requires a better fix
        if len(self.factory.chain.block_chain_buffer.pending_blocks) > 0 and \
                min(self.factory.chain.block_chain_buffer.pending_blocks.keys()) == blocknumber:
            self.factory.chain.block_chain_buffer.process_pending_blocks()
            return

        self.factory.pos.randomize_block_fetch(blocknumber + 1)
    except Exception as e:
        logger.error('block rejected - unable to decode serialised data %s', self.transport.getPeer().host)
        logger.exception(e)
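# Illustrative shape of the payload PBB decodes (placeholder block JSON,
# not real QRL block data): a single blocknumber key maps to a list of
# candidate blocks, since competing forks can produce several blocks at
# the same height.
import json

payload = json.dumps({
    '5': [
        {'blockheader': {'blocknumber': 5}},  # one candidate block
        {'blockheader': {'blocknumber': 5}},  # a competing block, same height
    ]
})
data = json.loads(payload)
blocknumber = int(list(data.keys())[0])
assert blocknumber == 5 and len(data[str(blocknumber)]) == 2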
def state_get_address(self, addr):
    try:
        return self.db.get(addr.encode())
    except KeyError:
        pass
    except Exception as e:
        logger.error('Exception in state_get_address')
        logger.exception(e)

    return [0, 100 * (10**8), []]
def stake_list_get(self):
    try:
        return self.db.get('stake_list'.encode())
    except KeyError:
        pass
    except Exception as e:
        logger.error('Exception in stake_list_get')
        logger.exception(e)

    return []
def state_get_address(self, addr):
    try:
        return self.db.get(addr)
    except KeyError:
        logger.warning('state_get_address: No state found for %s', addr)
    except Exception as e:
        logger.error('Exception in state_get_address')
        logger.exception(e)

    return [0, 0, []]
def state_balance(self, addr):
    try:
        return self.db.get(addr.encode())[1]
    except KeyError:
        pass
    except Exception as e:
        logger.error('Exception in state_balance')
        logger.exception(e)

    return 100 * (10**8)
def state_nonce(self, addr):
    try:
        return self.db.get(addr.encode())[0]
    except KeyError:
        pass
    except Exception as e:
        logger.error('Exception in state_nonce')
        logger.exception(e)

    return 0
def state_get_txn_count(self, addr):
    try:
        return self.db.get('txn_count_' + addr)
    except KeyError:
        logger.warning('No txn count for %s', addr)
    except Exception as e:
        logger.error('Exception in state_get_txn_count')
        logger.exception(e)

    return 0
def state_hrs(self, hrs):
    try:
        return self.db.get('hrs' + hrs)
    except KeyError:
        logger.warning("state_hrs: state not found for %s", hrs)
    except Exception as e:
        logger.error('Exception in state_hrs')
        logger.exception(e)

    return False
def state_pubhash(self, addr):
    try:
        return self.db.get(addr)[2]
    except KeyError:
        logger.warning("state_pubhash: state not found for %s", addr)
    except Exception as e:
        logger.error('Exception in state_pubhash')
        logger.exception(e)

    return []
def state_nonce(self, addr):
    try:
        return self.db.get(addr)[0]
    except KeyError:
        logger.warning("state_nonce: state not found for %s", addr)
    except Exception as e:
        logger.error('Exception in state_nonce')
        logger.exception(e)

    return 0
def state_pubhash(self, addr):
    try:
        return self.db.get(addr.encode())[2]
    except KeyError:
        pass
    except Exception as e:
        logger.error('Exception in state_pubhash')
        logger.exception(e)

    return []
def error_msg(self, func_name, blocknum, error=None):
    if error:
        logger.error('%s Unknown exception at blocknum: %s', func_name, blocknum)
        logger.error('%s', error)
        return

    logger.error('%s blocknum not found in blocks %s', func_name, blocknum)
    logger.error('Min block num %s', min(self.blocks))
    logger.error('Max block num %s', max(self.blocks))
def next_stake_list_get(self):
    try:
        return self.db.get('next_stake_list')
    except KeyError:
        pass
    except Exception as e:
        logger.error('Exception in next_stake_list_get')
        logger.exception(e)

    return []
def stake_list_get(self):
    try:
        return self.db.get('stake_list')
    except KeyError:
        logger.warning('stake_list not found; returning empty list')
    except Exception as e:
        logger.error('Exception in stake_list_get')
        logger.exception(e)

    return []
def state_address_used(self, address):
    # if excepts then address does not exist..
    try:
        return self._get_address_state(address)
    except KeyError:
        pass
    except Exception as e:
        logger.error('Exception in state_address_used')
        logger.exception(e)

    return False
def state_hrs(self, hrs):
    try:
        return self.db.get('hrs{}'.format(hrs))
    except KeyError:
        pass
    except Exception as e:
        logger.error('Exception in state_hrs')
        logger.exception(e)

    return False
def state_get_txn_count(self, addr):
    try:
        return self.db.get('txn_count_' + addr)
    except KeyError:
        pass
    except Exception as e:
        logger.error('Exception in state_get_txn_count')
        logger.exception(e)

    return 0
def get(self, key_obj):
    if not isinstance(key_obj, bytes):
        key_obj = bytes(key_obj, 'utf-8')
    value_obj = self.db.Get(key_obj)
    try:
        return json.loads(value_obj.decode())['value']
    except KeyError as e:
        logger.error("Key not found %s", key_obj)
        logger.exception(e)
    except Exception as e:
        logger.exception(e)
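# get() above assumes every stored value is wrapped in a {'value': ...}
# JSON envelope. A minimal sketch of the matching writer (hypothetical;
# the actual put() is not shown in this section):
import json

def put(self, key_obj, value_obj):
    if not isinstance(key_obj, bytes):
        key_obj = bytes(key_obj, 'utf-8')
    # wrap in the envelope that get() unwraps, stored as UTF-8 bytes
    self.db.Put(key_obj, json.dumps({'value': value_obj}).encode('utf-8'))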
def state_address_used(self, addr):
    # if excepts then address does not exist..
    try:
        return self.db.get(addr)
    except KeyError:
        logger.warning('state_address_used: address not found %s', addr)
    except Exception as e:
        logger.error('Exception in state_address_used')
        logger.exception(e)

    return False
def get_txn_count(self, addr):
    try:
        return self._db.get(b'txn_count_' + addr)
    except KeyError:
        pass
    except Exception as e:  # FIXME: Review
        logger.error('Exception in get_txn_count')
        logger.exception(e)

    return 0
def address_used(self, address: bytes):
    # FIXME: Probably obsolete
    try:
        return self._get_address_state(address)
    except KeyError:
        return False
    except Exception as e:  # FIXME: Review
        logger.error('Exception in address_used')
        logger.exception(e)
        raise
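# The state getters above share one convention: a KeyError (missing key)
# falls through to a per-field default, while unexpected errors are
# logged (and, in address_used, re-raised). A self-contained sketch of
# the pattern with an in-memory stand-in for the LevelDB wrapper:
import logging

logger = logging.getLogger(__name__)

class StateStub:
    def __init__(self):
        self._store = {}

    def get_or_default(self, key, default):
        try:
            return self._store[key]
        except KeyError:
            return default       # missing key: quiet fallback
        except Exception as e:
            logger.exception(e)  # anything else is unexpected: log it
            return default

state = StateStub()
assert state.get_or_default(b'txn_count_Q123', 0) == 0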