def noncesRelatedToBitiodineAddresses(self, caddresses, ctag):
    filem = UtilFileManager()
    arqName = "ClusterNoncesOutput_" + self.clusterType + "_" + str(ctag)
    countFindings = 0
    blockNumber = 0
    blockchain = Blockchain(
        os.path.expanduser(sp.configBlockchainPath + 'blocks'))
    for block in blockchain.get_ordered_blocks(
            os.path.expanduser(sp.configBlockchainPath + "blocks/index"),
            start=0):
        blockNumber = blockNumber + 1
        nonce = block.header.nonce
        # Get outputs from the coinbase transaction
        transaction = block.transactions[0]
        for output in transaction.outputs:
            # Get addresses
            for outAddr in output.addresses:
                for strAddr in caddresses:
                    if outAddr._address == strAddr:
                        # Save that nonce
                        filem.saveInFile(arqName, nonce)
                        self.append(nonce)
                        countFindings = countFindings + 1
    if countFindings > 0:
        scalc = Statistics()
        scalc.printStatistics("Nonces", arqName, filem)
    return countFindings
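# Usage sketch (hypothetical driver code, not part of the module): the same
# coinbase scan written against python-blockchain-parser directly. Blockchain,
# get_ordered_blocks, block.header.nonce and output.addresses are the
# library's API; the blocks path and the target address are placeholders.
import os
from blockchain_parser.blockchain import Blockchain

TARGET_ADDRESSES = {"1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa"}  # placeholder set

chain = Blockchain(os.path.expanduser("~/.bitcoin/blocks"))
for block in chain.get_ordered_blocks(
        os.path.expanduser("~/.bitcoin/blocks/index"), start=0, end=1000):
    coinbase = block.transactions[0]
    for output in coinbase.outputs:
        if any(addr.address in TARGET_ADDRESSES for addr in output.addresses):
            print(block.header.nonce)  # nonce worth keeping for statistics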
def analyzeBlockSciCluster(self, data, ctag, addressType):
    filem = UtilFileManager()
    arqName = "ClusterLSBAddressOutput_" + self.clusterType + "_" + str(
        ctag) + addressType.name + ".data"
    countBits = 0
    byteStr = ""
    byteOutput = 0
    addressesIterator = data.addresses
    for ad in addressesIterator.with_type(addressType):
        address = ad.address_string
        countBits = countBits + 1
        if self.extractLSBfromAddress(address) == 1:
            byteOutput = (byteOutput << 1) + 1
        else:
            byteOutput = byteOutput << 1
        if countBits == 8:
            # Save this byte in the respective file
            filem.saveByte(arqName, byteOutput)
            byteStr = byteStr + chr(byteOutput)
            byteOutput = 0
            countBits = 0
    # Compute statistics
    if len(byteStr) != 0:
        scalc = Statistics()
        scalc.printStatistics("LSB Analyzer", arqName + "pubkey.data",
                              filem, byteStr)
        self.keepExtractedData(arqName, byteStr, filem)
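# extractLSBfromAddress is defined elsewhere in the class; a plausible sketch
# for base58check (P2PKH/P2SH) addresses follows. The base58 decoding is
# standard; taking the last byte of the decoded hash160 payload as the
# embedded bit is an assumption about what the helper actually does.
B58_ALPHABET = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"

def extract_lsb_from_address(address: str) -> int:
    """Return the least significant bit of the address's hash160 payload."""
    num = 0
    for ch in address:
        num = num * 58 + B58_ALPHABET.index(ch)
    raw = num.to_bytes(25, "big")  # version byte + 20-byte hash160 + 4-byte checksum
    hash160 = raw[1:-4]            # strip version and checksum
    return hash160[-1] & 1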
def test_exercise():
    statistics = Statistics()
    statistics.add_number(3)
    assert statistics.get_count() == 1
    statistics.add_number(5)
    statistics.add_number(1)
    statistics.add_number(2)
    assert statistics.get_count() == 4
    assert statistics.sum == 11
    assert statistics.average() == 2.75
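# A minimal sketch of the Statistics accumulator these assertions imply; the
# attribute and method names come from the test itself, the implementation
# body is an assumption.
class Statistics:
    def __init__(self):
        self.sum = 0      # running total, read directly by the test
        self._count = 0

    def add_number(self, value):
        self.sum += value
        self._count += 1

    def get_count(self):
        return self._count

    def average(self):
        return self.sum / self._count  # 11 / 4 == 2.75 for the test data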
def __init__(self, producer_count, alpha, beta, device_count, lambda_param,
             buffer_size):
    self.__producers = [
        Producer(i, alpha, beta) for i in range(producer_count)
    ]
    self.__devices = [Device(lambda_param) for _ in range(device_count)]
    self.__current_device = 0
    self.__buffer = Buffer(buffer_size)
    self.__alpha = alpha
    self.__beta = beta
    self.__lambda = lambda_param
    self.__stat = Statistics(producer_count, device_count)
    self.__creation_log = []
    self.__setting_log = []
    self.__event_log = []
    self.__release_log = []
    self.__deny_log = []
    self.__buffer_log = []
def analyzeBitIodineOrEtherclustCluster(self, data, ctag):
    filem = UtilFileManager()
    arqName = "ClusterLSBAddressOutput_" + self.clusterType + "_" + str(ctag)
    countBits = 0
    byteStr = ""
    byteOutput = 0
    # Compute data bytes
    for address in data:
        countBits = countBits + 1
        if self.extractLSBfromBitiodineAddress(address) == 1:
            byteOutput = (byteOutput << 1) + 1
        else:
            byteOutput = byteOutput << 1
        if countBits == 8:
            # Save this byte in the respective file
            filem.saveByte(arqName, byteOutput)
            byteStr = byteStr + chr(byteOutput)  # or .decode("utf-8")
            byteOutput = 0
            countBits = 0
    if len(byteStr) != 0:
        # Compute statistics
        savedData = filem.openByteFile(arqName)
        scalc = Statistics()
        pvalue = scalc.monobitTest(savedData)
        print("\t\t'Message':" + byteStr)
        print("\t\tAM:" + str(scalc.computeAM(savedData)))
        print("\t\tEntropy:" + str(scalc.computeEntropy(savedData)))
        if pvalue < 0.01:
            monobitresult = 1
        else:
            monobitresult = 0
        print("\t\tMonobit test (p-value):" + str(pvalue) + ", PASS:" +
              str(monobitresult))
        self.keepExtractedData(arqName, byteStr, filem)
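# The monobitTest used above is consistent with the NIST SP 800-22 frequency
# (monobit) test. A self-contained sketch of that statistic; the real
# method's signature is assumed to take the saved bytes and return a p-value.
import math

def monobit_test(data: bytes) -> float:
    """Map bits to +/-1, sum, normalize by sqrt(n), and convert to a
    p-value via the complementary error function. p < 0.01 rejects the
    hypothesis that the bit stream is random."""
    bits = "".join(format(byte, "08b") for byte in data)
    n = len(bits)
    if n == 0:
        return 1.0  # no data: nothing to reject
    s = sum(1 if b == "1" else -1 for b in bits)
    return math.erfc((abs(s) / math.sqrt(n)) / math.sqrt(2))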
def analyzeSequentialAddresses(self, data, ctag):
    filem = UtilFileManager()
    print("Analyzing Addresses...")
    arqName = "SequentialLSBAddressOutput_0.data"
    countBits = 0
    byteOutput = 0
    countAddresses = 0
    countChunk = 0
    # Compute data bytes
    for address in data:
        countBits = countBits + 1
        if self.extractLSBfromAddress(address) == 1:
            byteOutput = (byteOutput << 1) + 1
        else:
            byteOutput = byteOutput << 1
        if countBits == 8:
            # Save this byte in the respective file
            filem.saveByte(arqName, byteOutput)
            byteOutput = 0
            countBits = 0
        countAddresses = countAddresses + 1
        if countAddresses % 100000 == 0:
            # Compute statistics for the chunk processed so far
            savedData = filem.openByteFile(arqName)
            scalc = Statistics()
            pvalue = scalc.monobitTest(savedData)
            print("\t\tAM:" + str(scalc.computeAM(savedData)))
            print("\t\tEntropy:" + str(scalc.computeEntropy(savedData)))
            if pvalue < 0.01:
                monobitresult = 1
            else:
                monobitresult = 0
            print("\t\tMonobit test (p-value):" + str(pvalue) + ", PASS:" +
                  str(monobitresult))
            # Start the next output chunk
            countChunk = countChunk + 1
            arqName = "SequentialLSBAddressOutput_" + str(countChunk) + ".data"
def setUp(self):
    conf = 0.95     # Confidence level
    n_bits = 1000   # Number of bits per packet
    tx_rate = 50    # Tx rate in Mbps
    self.stat = Statistics(n_bits, tx_rate, conf)
""" This file contains modules used to calculate the statistics for elastic src """ import os from src.constants import Constants from src.elastic.elastic import Elastic from src.statistics import Statistics from src.utils import Utils if __name__ == '__main__': UTILS = Utils(os.path.join("resources")) FIELDS = Constants.name_search_fields ALL_SUBSETS = UTILS.get_subsets(FIELDS) MISSPELLED_NAMES = UTILS.read_csv("names-misspelled.csv") CORRECT_NAMES = UTILS.read_csv("names-expected.csv") elastic = Elastic() STATISTICS = Statistics().calculate_statistics(CORRECT_NAMES, MISSPELLED_NAMES, ALL_SUBSETS, elastic, generate_reports=True) Statistics().generate_f1() UTILS.write_json_to_directory(STATISTICS, "reports") print(UTILS.get_shortest_fields_with_highest_f1_score(STATISTICS))