def menuHandler_DeleteNetworkObserver(error_raised=False):
    """Interactive menu handler that deletes a stored NetworkObserver.

    Prompts for an observer identifier, verifies that it is loaded into
    memory and, if found, removes its persisted configuration from disk.
    On any input error the prompt is shown again with an error banner.

    :param error_raised: when True, an error banner is printed before
        the prompt (set on recursive retries).
    """
    global initialized_NetworkObserver_configurations
    if error_raised:
        print(makePrettyUiLine('', enclosing=True))
        print(makePrettyUiLine(''))
        print(makePrettyUiLine('A typing error was raised, please try again'))
        print(makePrettyUiLine(''))
    print(makePrettyUiLine(''))
    print(makePrettyUiLine('Fill out the following parameter'))
    print(makePrettyUiLine(''))
    input_observer_identifier = input("* //int// Observer identifier: ")
    # BUGFIX: a non-numeric identifier used to raise an uncaught ValueError at
    # the int() call below; re-prompt instead, consistent with the banner text.
    try:
        parsed_identifier = int(input_observer_identifier)
    except ValueError:
        menuHandler_DeleteNetworkObserver(error_raised=True)
        return
    observer_exists = False
    selected_observer = None
    for NetworkObserver in initialized_NetworkObserver_configurations.loadedNetworkObservers:
        if parsed_identifier == NetworkObserver.observer_identifier:
            observer_exists = True
            selected_observer = NetworkObserver
    if not observer_exists:
        logPretty(
            'Observer does not exist on disk or has not been loaded into memory'
        )
        # BUGFIX: previously recursed into menuHandler_UpdateNetworkObserver,
        # dropping the user into the *update* flow from the *delete* menu.
        menuHandler_DeleteNetworkObserver(error_raised=True)
    if observer_exists:
        initialized_NetworkObserver_configurations.deleteExistingNetworkObserver(
            selected_observer.observer_identifier)
def fetchTransactionsForBlock(self, block_height): from helpers import getTimestampSeconds # logPretty('Attempting to fetch transactions for blockHeight {} from NetworkObserver {}'.format(block_height, self.ip_address)) temp_res = requests.get(self.base_url + 'transactionSearch?blockHeight=' + str(block_height)) if temp_res.status_code == 200: try: # logPretty('transactionSearch fetch successful for blockHeight {} from NetworkObserver {}'.format(block_height, self.ip_address)) self.last_successful_transaction_fetch_timestamp_seconds = getTimestampSeconds( ) response_decoded = json.loads(temp_res.content.decode('utf-8')) if len(response_decoded['errors']) == 0: for transaction in response_decoded['result']: self.last_seen_transaction_blocks.append(transaction) else: self.last_seen_transaction_blocks = [] raise ValueError except Exception as e: logPretty( '{} - Failed to decode JSON from successful transactionSearch fetch' .format(e), color=colorPrint.RED) self.last_failed_transaction_fetch_timestamp_seconds = getTimestampSeconds( ) else: logPretty( 'Failed to fetch transactionSearch from NetworkObserver {}'. format(self.ip_address), color=colorPrint.RED) self.last_failed_transaction_fetch_timestamp_seconds = getTimestampSeconds( )
def deleteExistingNetworkObserver(self, observer_identifier):
    """Remove the configuration for *observer_identifier* from disk.

    Reads the single-line Python-literal list from 'stored_NetworkObservers',
    drops every entry with the given identifier, writes the remainder back
    as JSON and reloads the observers from disk.

    :param observer_identifier: identifier of the observer to delete.
    """
    with open('stored_NetworkObservers', 'r') as f:
        existing = ast.literal_eval(f.readline())
    # BUGFIX: the original popped entries out of `existing` while iterating
    # it, which skips the element following each removal; filter instead.
    new = [entry for entry in existing
           if entry['observer_identifier'] != observer_identifier]
    with open('stored_NetworkObservers', 'w') as f:
        f.write(json.dumps(new))
    logPretty('Successfully deleted NetworkObserver {}'.format(observer_identifier))
    self.loadStoredNetworkObserversFromDisk()
def updateExistingNetworkObserver(self, observer_identifier, new_configuration_dict):
    """Replace the stored configuration for *observer_identifier* on disk.

    Reads the single-line Python-literal list from 'stored_NetworkObservers',
    drops every entry with the given identifier, appends the new
    configuration, writes the list back as JSON and reloads from disk.

    :param observer_identifier: identifier of the observer to replace.
    :param new_configuration_dict: replacement configuration dict.
    """
    with open('stored_NetworkObservers', 'r') as f:
        existing = ast.literal_eval(f.readline())
    # BUGFIX: the original popped entries out of `existing` while iterating
    # it, which skips the element following each removal; filter instead.
    new = [entry for entry in existing
           if entry['observer_identifier'] != observer_identifier]
    new.append(new_configuration_dict)
    with open('stored_NetworkObservers', 'w') as f:
        f.write(json.dumps(new))
    logPretty('Successfully replaced existing configurations for NetworkObserver {}'.format(observer_identifier))
    self.loadStoredNetworkObserversFromDisk()
def addTransactionToDatabase(transaction_dict):
    """Insert one transaction document into the Mongo transactions collection.

    Copies a fixed set of keys out of *transaction_dict* (in a fixed field
    order), converting 'amount' from the API's string form to a float, and
    inserts the resulting document.  A missing key is logged and the insert
    is skipped.

    :param transaction_dict: transaction data enriched with run metadata.
    :return: the pymongo insert result, or None on a missing key.
    """
    global mongoClient
    global mongoDatabase
    global mongoCollectionTransactions
    try:
        logPretty('Adding transaction to database tx_id: {}'.format(
            transaction_dict['transactionNyzoString']))
        # Field order matters for the stored document, so build it in order.
        field_order = (
            'run_id',
            'amt_compliant_nodes',
            'amt_defiant_nodes',
            'total_deviations_from_highest_FrozenEdge',
            'total_blocks_with_deviations',
            'transactions_skipped',
            'adjusted_blocks_with_deviations',
            'height',
            'timestampMilliseconds',
            'type',
            'amount',
            'receiverIdentifier',
            'senderIdentifier',
            'previousHashHeight',
            'senderData',
            'senderDataBytes',
            'transactionNyzoString',
        )
        document = {}
        for key in field_order:
            value = transaction_dict[key]
            if key == 'amount':
                # the first character returned by the api is n
                value = float(value[1:])
            document[key] = value
        return mongoCollectionTransactions.insert_one(document)
    except KeyError as e:
        logPretty(
            '{} - addTransactionToDatabase failed to get value of transaction dict item'
            .format(e),
            color=colorPrint.RED)
def fetchFrozenEdge(self):
    """Fetch the current frozenEdgeHeight from this NetworkObserver's API.

    On success, updates ``self.last_seen_frozenEdgeHeight`` and the success
    timestamp; on HTTP failure or a decoding/shape error, updates the
    failure timestamp instead.
    """
    from helpers import getTimestampSeconds
    logPretty(
        'Attempting to fetch frozenEdgeHeight for NetworkObserver {}'.
        format(self.ip_address))
    temp_res = requests.get(self.base_url + 'frozenEdge')
    if temp_res.status_code == 200:
        try:
            logPretty(
                'frozenEdgeHeight fetch successful - NetworkObserver {}'.
                format(self.ip_address))
            self.last_seen_frozenEdgeHeight = json.loads(
                temp_res.content.decode('utf-8'))['result'][0]['height']
            self.last_successful_frozenEdge_fetch_timestamp_seconds = getTimestampSeconds()
        # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; only unexpected decode/shape errors belong here.
        except Exception:
            logPretty(
                'Failed to decode JSON from successful frozenEdgeHeight fetch',
                color=colorPrint.RED)
            self.last_failed_frozenEdge_fetch_timestamp_seconds = getTimestampSeconds()
    else:
        logPretty(
            'Failed to fetch frozenEdgeHeight from NetworkObserver {}'.
            format(self.ip_address),
            color=colorPrint.RED)
        self.last_failed_frozenEdge_fetch_timestamp_seconds = getTimestampSeconds()
def __init__(self):
    """Load and validate application configuration from 'stored_Configurations'.

    The file holds a single-line Python-literal dict; boolean-like values are
    stored as strings and parsed with ast.literal_eval.  Exits the process
    when storeSpecificAddressTransactions is enabled but no addresses are
    configured.
    """
    with open('stored_Configurations','r') as f:
        self.dict_config = ast.literal_eval(f.readline())
    # whether the interactive UI is shown on startup (stored as a string)
    self.showGuiOnStartup = ast.literal_eval(self.dict_config['showGuiOnStartup'])
    self.version = self.dict_config['version']
    # minimum % of compliant observers required before transactions are stored
    self.amount_of_network_observers_compliant_minimum_percentage = self.dict_config['amount_of_network_observers_compliant_minimum_percentage']
    # both in and outgoing are saved if enabled and listed in specificAddressListRaw
    self.storeSpecificAddressTransactions = ast.literal_eval(self.dict_config['storeSpecificAddressTransactions'])
    self.specificAddressListRaw = self.dict_config['specificAddressListRaw']
    if self.storeSpecificAddressTransactions and len(self.specificAddressListRaw) == 0:
        # Inconsistent configuration: filtering enabled with an empty list
        # would save nothing at all, so refuse to start.
        logPretty('storeSpecificAddressTransactions is enabled in the stored_Configurations file but no\n'
                  'addresses were given for which transactions have to be saved.\n'
                  'This would make running the application pointless, please disable the parameter or add an address to the list.',
                  color=colorPrint.RED)
        exit()
    if self.storeSpecificAddressTransactions and len(self.specificAddressListRaw) > 0:
        logPretty('storeSpecificAddressTransactions is enabled and only transactions sent to the given set of\n'
                  'raw Nyzo address identifiers will be saved.\n\n>>>> NyzoString identifiers WILL NOT WORK! <<<<',
                  color=colorPrint.YELLOW)
def addNewNetworkObserver(self, ip_address, save_permanently=True, consider_missing_blocks=True, consider_frozen_edge_discrepancy=True, consider_fetching_reliability=True, chunk_size_missing_blocks=30, failed_fetch_minimum_seconds_passed=350, allowed_frozenEdge_sync_discrepancy=5,url_prepend='http://', url_append='/api/', existing_observer_identifier=None):
    """Create a NetworkObserver, register it in memory, optionally persist it.

    New observers get the next free 0-indexed identifier (the current count
    of observers stored on disk); when *existing_observer_identifier* is
    given, that identifier is reused instead.
    """
    observer_id = self.getAmountOfStoredNetworkObserversFromDisk()  # 0-index
    if existing_observer_identifier is not None:
        observer_id = existing_observer_identifier
    instance = NetworkObserver(observer_id, ip_address,
                               consider_missing_blocks,
                               consider_frozen_edge_discrepancy,
                               consider_fetching_reliability,
                               chunk_size_missing_blocks,
                               failed_fetch_minimum_seconds_passed,
                               allowed_frozenEdge_sync_discrepancy,
                               url_prepend, url_append)
    self.loadedNetworkObservers.append(instance)
    logPretty('Successfully loaded NetworkObserver {} from disk into loadedNetworkObservers'.format(observer_id))
    if save_permanently:
        configuration = {
            'observer_identifier': observer_id,
            'ip_address': ip_address,
            'consider_missing_blocks': consider_missing_blocks,
            'consider_frozen_edge_discrepancy': consider_frozen_edge_discrepancy,
            'consider_fetching_reliability': consider_fetching_reliability,
            'chunk_size_missing_blocks': chunk_size_missing_blocks,
            'failed_fetch_minimum_seconds_passed': failed_fetch_minimum_seconds_passed,
            'allowed_frozenEdge_sync_discrepancy': allowed_frozenEdge_sync_discrepancy,
            'url_prepend': url_prepend,
            'url_append': url_append
        }
        self.saveNewNetworkObserver(configuration)
    self.amount_of_network_observers += 1
def getTransactionsFromDatabase(filter_value, filter_type='blockHeight'):
    """Query stored transactions, newest block first.

    :param filter_value: value to match against the chosen field.
    :param filter_type: one of 'blockHeight', 'transactionNyzoString' or
        'timestampMilliseconds'; anything else logs an error and yields [].
    :return: list of matching transaction documents sorted by height desc.
    """
    global mongoClient
    global mongoDatabase
    global mongoCollectionTransactions
    # Map each supported filter_type to the document field it queries.
    field_for_filter = {
        'blockHeight': 'height',
        'transactionNyzoString': 'transactionNyzoString',
        # TODO ideally this should be AROUND a timestamp with leniency
        'timestampMilliseconds': 'timestampMilliseconds',
    }
    if filter_type not in field_for_filter:
        logPretty(
            'Invalid filter_type, returning empty list - Mongo.getTransactionFromDatabase',
            color=colorPrint.RED)
        return []
    query = {field_for_filter[filter_type]: filter_value}
    cursor = mongoCollectionTransactions.find(query).sort([('height', -1)])
    return [transaction for transaction in cursor]
def addEventToDatabase(event_dict):
    """Insert one run-summary event into the Mongo events collection.

    :param event_dict: per-run summary data (run id, compliant/defiant node
        ids, amount of transactions added).
    :return: the pymongo insert result, or None when a key is missing.
    """
    global mongoClient
    global mongoDatabase
    global mongoCollectionEvents
    try:
        logPretty('Adding event to database run_id: {}'.format(
            event_dict['run_id']))
        # NOTE(review): the stored field is named 'amt_transactions_processed'
        # but is read from the 'amt_transactions_added' key of the input dict.
        document = {
            'timestamp': getTimestampSeconds(),
            'run_id': event_dict['run_id'],
            'compliant_nodes_ids': event_dict['compliant_nodes_ids'],
            'defiant_nodes_ids': event_dict['defiant_nodes_ids'],
            'amt_transactions_processed': event_dict['amt_transactions_added'],
        }
        return mongoCollectionEvents.insert_one(document)
    except KeyError as e:
        logPretty(
            '{} - addEventToDatabase failed to get value of event dict item'.
            format(e),
            color=colorPrint.RED)
def initiate_MainLoop():
    """Run one full observation loop.

    Fetches the frozen edge from every loaded NetworkObserver, flags
    observers whose frozen edge deviates too far or whose fetching recently
    failed, fetches transactions from the compliant ones, deduplicates them
    and writes the surviving transactions (annotated with run metadata) to
    the database.
    """
    global amount_of_loops
    amount_of_loops += 1
    logPretty('Initiating loop {}'.format(str(amount_of_loops)))
    from time import sleep
    sleep(7)
    #- used to temporarily store the frozen edge results
    frozenEdge_fetches = []
    #- generate a new run_id and timestamp
    #- query the frozen edge from each individual network observer
    for NetworkObserver in initialized_NetworkObserver_configurations.loadedNetworkObservers:
        NetworkObserver.discardPreviousRunTransactions()  # previous run's transactions are discarded to start fresh
        NetworkObserver.assignNewRunId()
        fetch_timestamp = getTimestampSeconds()
        NetworkObserver.fetchFrozenEdge()
        frozenEdge_fetches.append({
            'observer_identifier': NetworkObserver.observer_identifier,
            'ip_address': NetworkObserver.ip_address,
            'last_seen_frozenEdgeHeight': NetworkObserver.last_seen_frozenEdgeHeight,
            'last_failed_frozenEdge_fetch_timestamp_seconds': NetworkObserver.last_failed_frozenEdge_fetch_timestamp_seconds,
            'last_successful_frozenEdge_fetch_timestamp_seconds': NetworkObserver.last_successful_frozenEdge_fetch_timestamp_seconds,
            'failed_fetch_minimum_seconds_passed': NetworkObserver.failed_fetch_minimum_seconds_passed,
            'timestamp_problematic': False,
            'fetch_timestamp': fetch_timestamp,
            'deviation_from_highest_found': None,
            # BUGFIX: 'deviation' is read unconditionally further down, but was
            # only ever written when consider_frozen_edge_discrepancy was
            # enabled; initialize it here so disabled observers cannot KeyError.
            'deviation': None,
            'deviation_problematic': False,
            'consider_frozen_edge_discrepancy': NetworkObserver.consider_frozen_edge_discrepancy,
            'allowed_frozenEdge_sync_discrepancy': NetworkObserver.allowed_frozenEdge_sync_discrepancy,
        })
    #- uses the temporary results to assert highest found frozenEdgeHeight
    highest_frozenEdgeHeight = 0
    for frozenEdge_fetch in frozenEdge_fetches:
        if frozenEdge_fetch['last_seen_frozenEdgeHeight'] > highest_frozenEdgeHeight:
            highest_frozenEdgeHeight = frozenEdge_fetch['last_seen_frozenEdgeHeight']
    #- assert problematic deviation in regards to frozenEdgeHeight
    for frozenEdge_fetch in frozenEdge_fetches:
        if frozenEdge_fetch['consider_frozen_edge_discrepancy']:
            logPretty(
                'Checking if frozenEdgeHeight deviates per the configured allowed_frozenEdge_sync_discrepancy for NetworkObserver {}'
                .format(frozenEdge_fetch['ip_address']))
            if (highest_frozenEdgeHeight -
                    frozenEdge_fetch['last_seen_frozenEdgeHeight']
                ) > frozenEdge_fetch['allowed_frozenEdge_sync_discrepancy']:
                frozenEdge_fetch['deviation'] = (
                    highest_frozenEdgeHeight -
                    frozenEdge_fetch['last_seen_frozenEdgeHeight'])
                frozenEdge_fetch['deviation_problematic'] = True
                logPretty(
                    'frozenEdgeHeight out of boundaries with deviation={} for NetworkObserver {}'
                    .format(frozenEdge_fetch['deviation'],
                            frozenEdge_fetch['ip_address']),
                    color=colorPrint.RED)
            else:
                frozenEdge_fetch['deviation'] = (
                    highest_frozenEdgeHeight -
                    frozenEdge_fetch['last_seen_frozenEdgeHeight'])
                logPretty(
                    'frozenEdgeHeight NOT out of boundaries with deviation={} for NetworkObserver {} '
                    .format(frozenEdge_fetch['deviation'],
                            frozenEdge_fetch['ip_address']))
        else:
            logPretty(
                'Disregarding frozenEdgeHeight discrepancy check due to configuration of consider_frozen_edge_discrepancy for NetworkObserver {}'
                .format(frozenEdge_fetch['ip_address']),
                color=colorPrint.YELLOW)
    # assert if timestamp is problematic
    for frozenEdge_fetch in frozenEdge_fetches:
        logPretty(
            'Checking if last failed frozenEdge fetch resides far enough in history per the configurations for NetworkObserver {}'
            .format(frozenEdge_fetch['ip_address']))
        if (frozenEdge_fetch['last_failed_frozenEdge_fetch_timestamp_seconds']
                + frozenEdge_fetch['failed_fetch_minimum_seconds_passed']
            ) > frozenEdge_fetch['fetch_timestamp']:
            # BUGFIX: color=colorPrint.RED was passed to str.format (where it
            # was silently ignored) instead of to logPretty.
            logPretty(
                'Last failed frozenEdge fetch NOT old enough per the configuration minimum for NetworkObserver {}'
                .format(frozenEdge_fetch['ip_address']),
                color=colorPrint.RED)
            frozenEdge_fetch['timestamp_problematic'] = True
        else:
            logPretty('Timestamp compliant for NetworkObserver {}'.format(
                frozenEdge_fetch['ip_address']))
    # push the first assertions to NetworkObserver
    for NetworkObserver in initialized_NetworkObserver_configurations.loadedNetworkObservers:
        for frozenEdge_fetch in frozenEdge_fetches:
            if NetworkObserver.observer_identifier == frozenEdge_fetch['observer_identifier']:
                NetworkObserver.frozenEdge_deviation = frozenEdge_fetch['deviation']
                if frozenEdge_fetch['deviation_problematic']:
                    NetworkObserver.frozenEdge_in_sync = False
                    logPretty(
                        'FrozenEdge considered not in sync for NetworkObserver {}'
                        .format(NetworkObserver.ip_address),
                        color=colorPrint.YELLOW)
                else:
                    NetworkObserver.frozenEdge_in_sync = True
                    logPretty(
                        'FrozenEdge considered in sync for NetworkObserver {}'.
                        format(NetworkObserver.ip_address))
                if frozenEdge_fetch['timestamp_problematic']:
                    NetworkObserver.frozenEdge_fetching_reliable = False
                    logPretty(
                        'FrozenEdge fetching considered unreliable for NetworkObserver {}'
                        .format(NetworkObserver.ip_address),
                        color=colorPrint.YELLOW)
                else:
                    NetworkObserver.frozenEdge_fetching_reliable = True
                    logPretty(
                        'FrozenEdge fetching considered reliable for NetworkObserver {}'
                        .format(NetworkObserver.ip_address))
    # temporary list used to store timestamp results of transaction fetches, used to determine problematic fetching behavior
    after_blockFetches = []
    # use these assertions to determine if we want to try and fetch transactions from the nodes
    for NetworkObserver in initialized_NetworkObserver_configurations.loadedNetworkObservers:
        if NetworkObserver.frozenEdge_in_sync and NetworkObserver.frozenEdge_fetching_reliable:
            # the heights for transaction fetching are determined according to a network observer's frozenEdgeHeight
            height_start = NetworkObserver.last_seen_frozenEdgeHeight - NetworkObserver.chunk_size_missing_blocks
            height_end = NetworkObserver.last_seen_frozenEdgeHeight
            logPretty(
                'Starting fetching of transactions frozenEdge range({} - {}) - NetworkObserver {}'
                .format(height_start, height_end, NetworkObserver.ip_address))
            block_heights_fetch_initiated = []
            fetch_timestamp = getTimestampSeconds()
            for blockHeight in range(height_start, height_end + 1):
                # logPretty('Fetching transactions for blockHeight {} - NetworkObserver {}'.format(blockHeight, NetworkObserver.ip_address))
                block_heights_fetch_initiated.append(blockHeight)
                NetworkObserver.fetchTransactionsForBlock(blockHeight)
            logPretty(
                'Transaction fetching finished for NetworkObserver {}'.format(
                    NetworkObserver.ip_address))
            after_blockFetches.append({
                'observer_identifier': NetworkObserver.observer_identifier,
                'ip_address': NetworkObserver.ip_address,
                'last_failed_transaction_fetch_timestamp_seconds': NetworkObserver.last_failed_transaction_fetch_timestamp_seconds,
                'last_successful_transaction_fetch_timestamp_seconds': NetworkObserver.last_successful_transaction_fetch_timestamp_seconds,
                'fetch_timestamp': fetch_timestamp,
                'failed_fetch_minimum_seconds_passed': NetworkObserver.failed_fetch_minimum_seconds_passed,
                'block_fetching_reliable': None,
                'missing_blocks_in_chunk': False
            })
        else:
            logPretty(
                'Skipping transaction fetching for NetworkObserver {}'.format(
                    NetworkObserver.ip_address),
                color=colorPrint.YELLOW)
    # assert if timestamp is problematic
    for blockFetch in after_blockFetches:
        logPretty(
            'Checking if last failed transaction fetch resides far enough in history per the configurations for NetworkObserver {}'
            .format(blockFetch['ip_address']))
        if (blockFetch['last_failed_transaction_fetch_timestamp_seconds'] +
                blockFetch['failed_fetch_minimum_seconds_passed']
            ) > blockFetch['fetch_timestamp']:
            # BUGFIX: same misplaced color= kwarg as in the frozenEdge check.
            logPretty(
                'Last failed transaction fetch NOT old enough per the configuration minimum for NetworkObserver {}'
                .format(blockFetch['ip_address']),
                color=colorPrint.RED)
            blockFetch['block_fetching_reliable'] = False
        else:
            logPretty('Timestamp compliant for NetworkObserver {}'.format(
                blockFetch['ip_address']))
            blockFetch['block_fetching_reliable'] = True
    # push states to NetworkObserver
    for blockFetch in after_blockFetches:
        for NetworkObserver in initialized_NetworkObserver_configurations.loadedNetworkObservers:
            if blockFetch['observer_identifier'] == NetworkObserver.observer_identifier:
                NetworkObserver.block_fetching_reliable = blockFetch['block_fetching_reliable']
                NetworkObserver.missing_blocks_in_chunk = blockFetch['missing_blocks_in_chunk']  # not assigned, could be used in future
    # depending on consider_ configurations, we filter nodes
    compliant_NetworkObserver_identifiers = []
    defiant_NetworkObserver_identifiers = []
    for NetworkObserver in initialized_NetworkObserver_configurations.loadedNetworkObservers:
        compliant = True
        if NetworkObserver.consider_missing_blocks:
            if NetworkObserver.missing_blocks_in_chunk:
                compliant = False
        if NetworkObserver.consider_frozen_edge_discrepancy:
            if not NetworkObserver.frozenEdge_in_sync:
                compliant = False
        if NetworkObserver.consider_fetching_reliability:
            if not NetworkObserver.frozenEdge_fetching_reliable:
                compliant = False
            if not NetworkObserver.block_fetching_reliable:
                compliant = False
        if not compliant:
            logPretty('NetworkObserver {} - NOT fully compliant'.format(
                NetworkObserver.ip_address),
                color=colorPrint.RED)
            defiant_NetworkObserver_identifiers.append(
                NetworkObserver.observer_identifier)
        if compliant:
            logPretty('NetworkObserver {} - fully compliant'.format(
                NetworkObserver.ip_address))
            compliant_NetworkObserver_identifiers.append(
                NetworkObserver.observer_identifier)
    # percentage comparison fully compliant nodes
    minimum_compliance_percentage = initialized_configurations.amount_of_network_observers_compliant_minimum_percentage
    actual_compliance_percentage = 100 / (
        len(compliant_NetworkObserver_identifiers) +
        len(defiant_NetworkObserver_identifiers)
    ) * len(compliant_NetworkObserver_identifiers)
    tx_insertion_allowed = False
    if actual_compliance_percentage >= minimum_compliance_percentage:
        logPretty(
            'Minimum percentage of compliant network observers ({}%) has been met = {}%'
            .format(minimum_compliance_percentage,
                    actual_compliance_percentage))
        tx_insertion_allowed = True
    if not tx_insertion_allowed:
        logPretty(
            'No transactions (from both compliant and defiant NetworkObservers) will be added to the database due to the minimum of compliant network observers not being met',
            color=colorPrint.RED)
        # insert this event too
    # insert the transactions into the database if the minimum amount of compliant network observers is met
    # only insert transactions from compliant nodes
    # transactions are inserted once into a temporary list and handled further down below
    transactionsForDatabase = []
    if tx_insertion_allowed:
        for NetworkObserver in initialized_NetworkObserver_configurations.loadedNetworkObservers:
            if NetworkObserver.observer_identifier in compliant_NetworkObserver_identifiers:
                relevant_transactions = NetworkObserver.last_seen_transaction_blocks
                for transaction in relevant_transactions:
                    transactionsForDatabase.append(transaction)
            else:
                logPretty(
                    'Transactions will not be processed for defiant NetworkObserver {}'
                    .format(NetworkObserver.ip_address))
    else:
        logPretty(
            'Transactions will not be processed for all NetworkObservers due to the minimum amount of compliant nodes not being met',
            color=colorPrint.RED)
    # only unique transactions will be added to the database
    logPretty('Amount of transactions before uniqueness filter: {}'.format(
        len(transactionsForDatabase)))
    transactionsUniqueForDatabase = []
    for transaction in transactionsForDatabase:
        if checkIfTransactionInDatabase(
                transaction['transactionNyzoString']) is False:
            transactionsUniqueForDatabase.append(transaction)
    # remove duplicates in transactionsUniqueForDatabase
    deduplication_txs = set()
    deduplicated_transactionForDatabase = []
    for tx in transactionsUniqueForDatabase:
        try:
            if tx['transactionNyzoString'] not in deduplication_txs:
                deduplication_txs.add(tx['transactionNyzoString'])
            else:
                raise KeyError
        except Exception as e:
            continue
        deduplicated_transactionForDatabase.append(tx)
    transactionsUniqueForDatabase = deduplicated_transactionForDatabase
    # some variables are fetched from NetworkObservers
    # this is custom data which will be added to the transaction dict below
    current_run_id = ''
    amt_compliant_nodes = len(compliant_NetworkObserver_identifiers)
    amt_defiant_nodes = len(defiant_NetworkObserver_identifiers)
    for NetworkObserver in initialized_NetworkObserver_configurations.loadedNetworkObservers:
        # NOTE(review): index [4] assumes rolling_run_ids always holds exactly
        # five entries; [-1] would express "last appended" robustly — confirm.
        current_run_id = NetworkObserver.rolling_run_ids[4][0]  # the last one we appended
    # the unique transactions are added to the database, some custom data is added to the transaction dict
    logPretty('Amount of transactions after uniqueness filter: {}'.format(
        len(transactionsUniqueForDatabase)))
    if initialized_configurations.storeSpecificAddressTransactions:
        logPretty(
            'storeSpecificAddressTransactions is enabled, not all transactions will be saved!',
            color=colorPrint.YELLOW)
    # this pertains to the storeSpecificAddressTransactions filtration
    amount_of_irrelevant_transactions = 0
    highest_frozenEdge_deviation = 0
    blocks_with_deviations = []
    for NetworkObserver in initialized_NetworkObserver_configurations.loadedNetworkObservers:
        if NetworkObserver.frozenEdge_deviation > highest_frozenEdge_deviation:
            highest_frozenEdge_deviation = NetworkObserver.frozenEdge_deviation
    final_transactionsForDatabase = []
    for transaction in transactionsUniqueForDatabase:
        # consider data homogeneity in terms of transactions, this uses the amount of compliant nodes
        seen_by_networkobservers = 0
        for NetworkObserver in initialized_NetworkObserver_configurations.loadedNetworkObservers:
            curr_txs = NetworkObserver.last_seen_transaction_blocks
            for tx in curr_txs:
                # NOTE(review): substring containment ('in'), not equality —
                # equivalent only while both sides are full NyzoStrings; confirm.
                if tx['transactionNyzoString'] in transaction['transactionNyzoString']:
                    seen_by_networkobservers += 1
        if seen_by_networkobservers < amt_compliant_nodes:
            if transaction['height'] not in blocks_with_deviations:
                blocks_with_deviations.append(transaction['height'])
        ship_to_database = False
        if initialized_configurations.storeSpecificAddressTransactions:
            if transaction['receiverIdentifier'] in initialized_configurations.specificAddressListRaw or transaction['senderIdentifier'] in initialized_configurations.specificAddressListRaw:
                logPretty(
                    'receiverIdentifier or senderIdentifier matches an address specified in specificAddressListRaw'
                )
                ship_to_database = True
            else:
                amount_of_irrelevant_transactions += 1
        else:
            ship_to_database = True
        if ship_to_database:
            transaction['run_id'] = current_run_id
            transaction['amt_compliant_nodes'] = amt_compliant_nodes
            transaction['amt_defiant_nodes'] = amt_defiant_nodes
            final_transactionsForDatabase.append(transaction)
    if amount_of_irrelevant_transactions > 0:
        logPretty(
            'Amount of transactions skipped due to storeSpecificAddressTransactions: {}'
            .format(amount_of_irrelevant_transactions))
    logPretty(
        'Total block deviations from highest frozenEdgeHeight: {}'.format(
            highest_frozenEdge_deviation))
    logPretty(
        'Total of adjusted blocks with transaction deviations: {}'.format(
            len(blocks_with_deviations) / 2))
    if (len(blocks_with_deviations) / 2) > highest_frozenEdge_deviation:
        logPretty(
            'The amount of blocks for which the transaction content differs shouldn\'t exceed {} but {} was found!'
            .format(highest_frozenEdge_deviation,
                    (len(blocks_with_deviations) / 2)),
            color=colorPrint.RED)
    for tx in final_transactionsForDatabase:
        tx['total_deviations_from_highest_FrozenEdge'] = highest_frozenEdge_deviation
        tx['total_blocks_with_deviations'] = len(blocks_with_deviations)
        tx['adjusted_blocks_with_deviations'] = len(blocks_with_deviations) / 2
        tx['transactions_skipped'] = amount_of_irrelevant_transactions
        addTransactionToDatabase(tx)
def menuHandler_UpdateNetworkObserver(error_raised=False):
    """Interactive menu handler that updates a stored NetworkObserver.

    Prompts for an observer identifier and then, for each configurable
    parameter, either a new value or [ENTER] to keep the current one.
    Validated values are persisted via updateExistingNetworkObserver; on any
    input error the handler recurses with an error banner so the user can
    retry.

    :param error_raised: when True, an error banner is printed before the
        prompts (set on recursive retries).
    """
    global initialized_NetworkObserver_configurations
    if error_raised:
        print(makePrettyUiLine('', enclosing=True))
        print(makePrettyUiLine(''))
        print(makePrettyUiLine('A typing error was raised, please try again'))
        print(
            makePrettyUiLine('To view the main menu, use the "main" command'))
        print(makePrettyUiLine(''))
    print(makePrettyUiLine(''))
    print(
        makePrettyUiLine(
            'Fill out the following parameters, press [ENTER] to use the current value'
        ))
    print(makePrettyUiLine(''))
    input_observer_identifier = input("* //int// Observer identifier: ")
    # BUGFIX: a non-numeric identifier used to raise an uncaught ValueError at
    # the int() call below; re-prompt instead, consistent with this handler's
    # retry-on-typing-error behavior everywhere else.
    try:
        parsed_identifier = int(input_observer_identifier)
    except ValueError:
        menuHandler_UpdateNetworkObserver(error_raised=True)
        return
    observer_exists = False
    selected_observer = None
    for NetworkObserver in initialized_NetworkObserver_configurations.loadedNetworkObservers:
        if parsed_identifier == NetworkObserver.observer_identifier:
            observer_exists = True
            selected_observer = NetworkObserver
    if not observer_exists:
        logPretty(
            'Observer does not exist on disk or has not been loaded into memory'
        )
        menuHandler_UpdateNetworkObserver(error_raised=True)
    if observer_exists:
        logPretty('Confirmed that observer exists and is loaded into memory')
        input_ip_address = input('* //string// IP address [{}]: '.format(
            selected_observer.ip_address))
        input_consider_missing_blocks = input(
            '* //boolean// Consider missing blocks [{}]: '.format(
                selected_observer.consider_missing_blocks))
        input_consider_frozen_edge_discrepancy = input(
            '* //boolean// Consider frozen edge discrepancy [{}]: '.format(
                selected_observer.consider_frozen_edge_discrepancy))
        input_consider_fetching_reliability = input(
            '* //boolean// Consider fetching reliability [{}]: '.format(
                selected_observer.consider_fetching_reliability))
        input_chunk_size_missing_blocks = input(
            '* //int// Chunk size missing blocks [{}]: '.format(
                selected_observer.chunk_size_missing_blocks))
        input_failed_fetch_minimum_seconds_passed = input(
            '* //int// Failed fetch minimum seconds passed [{}]: '.format(
                selected_observer.failed_fetch_minimum_seconds_passed))
        input_allowed_frozenEdge_sync_discrepancy = input(
            '* //int// Allowed frozenEdge sync discrepancy [{}]: '.format(
                selected_observer.allowed_frozenEdge_sync_discrepancy))
        input_url_prepend = input('* //string// URL prepend [{}]: '.format(
            selected_observer.url_prepend))
        input_url_append = input('* //string// URL append [{}]: '.format(
            selected_observer.url_append))
        try:
            # For every parameter: empty input keeps the current value,
            # otherwise the input is validated/converted; a failed validation
            # raises and the whole handler restarts.
            if len(input_ip_address) > 0:
                if len(input_ip_address.split('.')) == 4:
                    input_ip_address = str(input_ip_address)
                else:
                    raise TypeError
            else:
                input_ip_address = selected_observer.ip_address
            if len(input_consider_missing_blocks) > 0:
                input_consider_missing_blocks = ast.literal_eval(
                    input_consider_missing_blocks)
            else:
                input_consider_missing_blocks = selected_observer.consider_missing_blocks
            if len(input_consider_frozen_edge_discrepancy) > 0:
                input_consider_frozen_edge_discrepancy = ast.literal_eval(
                    input_consider_frozen_edge_discrepancy)
            else:
                input_consider_frozen_edge_discrepancy = selected_observer.consider_frozen_edge_discrepancy
            if len(input_consider_fetching_reliability) > 0:
                input_consider_fetching_reliability = ast.literal_eval(
                    input_consider_fetching_reliability)
            else:
                input_consider_fetching_reliability = selected_observer.consider_fetching_reliability
            if len(input_chunk_size_missing_blocks) > 0:
                input_chunk_size_missing_blocks = int(
                    input_chunk_size_missing_blocks)
            else:
                input_chunk_size_missing_blocks = selected_observer.chunk_size_missing_blocks
            if len(input_failed_fetch_minimum_seconds_passed) > 0:
                input_failed_fetch_minimum_seconds_passed = int(
                    input_failed_fetch_minimum_seconds_passed)
            else:
                input_failed_fetch_minimum_seconds_passed = selected_observer.failed_fetch_minimum_seconds_passed
            if len(input_allowed_frozenEdge_sync_discrepancy) > 0:
                input_allowed_frozenEdge_sync_discrepancy = int(
                    input_allowed_frozenEdge_sync_discrepancy)
            else:
                input_allowed_frozenEdge_sync_discrepancy = selected_observer.allowed_frozenEdge_sync_discrepancy
            if len(input_url_prepend) > 0:
                if 'http' in input_url_prepend:
                    input_url_prepend = str(input_url_prepend)
                else:
                    raise TypeError
            else:
                input_url_prepend = selected_observer.url_prepend
            if len(input_url_append) > 0:
                if '/' in input_url_append:
                    input_url_append = str(input_url_append)
                else:
                    raise TypeError
            else:
                input_url_append = selected_observer.url_append
            logPretty(
                'Successfully validated all new parameters for NetworkObserver'
            )
            initialized_NetworkObserver_configurations.updateExistingNetworkObserver(
                int(selected_observer.observer_identifier), {
                    "observer_identifier": selected_observer.observer_identifier,
                    "ip_address": input_ip_address,
                    "consider_missing_blocks": str(input_consider_missing_blocks).capitalize(),
                    "consider_frozen_edge_discrepancy": str(input_consider_frozen_edge_discrepancy).capitalize(),
                    "consider_fetching_reliability": str(input_consider_fetching_reliability).capitalize(),
                    "chunk_size_missing_blocks": input_chunk_size_missing_blocks,
                    "failed_fetch_minimum_seconds_passed": input_failed_fetch_minimum_seconds_passed,
                    "allowed_frozenEdge_sync_discrepancy": input_allowed_frozenEdge_sync_discrepancy,
                    "url_prepend": input_url_prepend,
                    "url_append": input_url_append
                })
        # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt and made Ctrl-C restart the prompt forever.
        except Exception:
            menuHandler_UpdateNetworkObserver(error_raised=True)
# NOTE(review): the section below repeats the tail of initiate_MainLoop
# verbatim (same logPretty call and final tx loop) — it appears to be a
# duplicated chunk; at module level these names (highest_frozenEdge_deviation,
# blocks_with_deviations, final_transactionsForDatabase, ...) are undefined.
# Confirm against the complete file before relying on it.
logPretty(
    'The amount of blocks for which the transaction content differs shouldn\'t exceed {} but {} was found!'
    .format(highest_frozenEdge_deviation,
            (len(blocks_with_deviations) / 2)),
    color=colorPrint.RED)
for tx in final_transactionsForDatabase:
    # Annotate each outgoing transaction with the run-level deviation stats
    # before inserting it.
    tx['total_deviations_from_highest_FrozenEdge'] = highest_frozenEdge_deviation
    tx['total_blocks_with_deviations'] = len(blocks_with_deviations)
    tx['adjusted_blocks_with_deviations'] = len(blocks_with_deviations) / 2
    tx['transactions_skipped'] = amount_of_irrelevant_transactions
    addTransactionToDatabase(tx)
# the events for the network observers are added to the database

if __name__ == "__main__":
    # Entry point: connect to Mongo, load configuration and observers, then
    # either show the interactive UI or run the observation loop forever.
    amount_of_loops = 0
    initializeMongo()
    initialized_configurations = Configurations()
    initialized_NetworkObserver_configurations = NetworkObserverConfigurations(
        amount_of_network_observers_compliant_minimum_percentage=
        initialized_configurations.
        amount_of_network_observers_compliant_minimum_percentage)
    if initialized_configurations.showGuiOnStartup:
        showUiStart(initialized_configurations.version)
    else:
        logPretty('showGuiOnStartup has been disabled, initiating main loop')
        # assumes the endless loop belongs to the non-GUI branch — the GUI
        # path drives its own loop via showUiStart; TODO confirm.
        while True:
            initiate_MainLoop()
def getAmountOfStoredNetworkObserversFromDisk(self):
    """Count the NetworkObserver configurations persisted on disk.

    The 'stored_NetworkObservers' file holds a single-line Python-literal
    list; its length is the number of stored observers.
    """
    with open('stored_NetworkObservers', 'r') as f:
        logPretty('Fetching the amount of stored Network Observers on disk')
        stored_observers = ast.literal_eval(f.readline())
    return len(stored_observers)
def assignNewRunId(self):
    """Rotate the rolling run-id window for this observer.

    Drops the oldest entry and appends a fresh ``[run_id, timestamp]`` pair,
    keeping the window length constant, then logs the rotation.
    """
    from helpers import getTimestampSeconds, generateRunId
    self.rolling_run_ids.pop(0)  # evict the oldest entry
    fresh_entry = [generateRunId(), getTimestampSeconds()]
    self.rolling_run_ids.append(fresh_entry)
    logPretty('Generating new run id and timestamp for NetworkObserver {}'.
              format(self.ip_address))