def and_test():
    """Test the & operator (bloom-filter intersection).

    Builds two overlapping filters ({a,b,c} and {b,c,d}), intersects them
    in place, and verifies membership of each character.  Returns True when
    every check passes.

    Fixes: the last three error messages were missing their trailing
    newline (inconsistent with the first two); `not 'x' in` rewritten as
    the idiomatic `'x' not in`.
    """
    all_good = True

    abc = bloom_filter.BloomFilter(max_elements=100, error_rate=0.01,
                                   redis_connection=redis)
    for character in ['a', 'b', 'c']:
        abc += character

    bcd = bloom_filter.BloomFilter(max_elements=100, error_rate=0.01,
                                   redis_connection=redis)
    for character in ['b', 'c', 'd']:
        bcd += character

    # NOTE: &= mutates in place, so abc_and_bcd aliases abc after this line.
    abc_and_bcd = abc
    abc_and_bcd &= bcd

    if 'a' in abc_and_bcd:
        sys.stderr.write('a in abc_and_bcd, but should not be\n')
        all_good = False
    if 'b' not in abc_and_bcd:
        sys.stderr.write('b not in abc_and_bcd, but should be\n')
        all_good = False
    if 'c' not in abc_and_bcd:
        sys.stderr.write('c not in abc_and_bcd, but should be\n')
        all_good = False
    if 'd' in abc_and_bcd:
        sys.stderr.write('d in abc_and_bcd, but should not be\n')
        all_good = False

    return all_good
def key_test():
    """Test that all filter operations (+=, in, |=, &=) work when the
    filter is constructed with a custom ``key`` extraction function."""
    class Dummy():
        # Wrapper object; its identity for filtering purposes is ``biscuit``.
        def __init__(self, biscuit):
            self.biscuit = biscuit

        def get(self):
            return self.biscuit

    def dummy_key(dummy):
        # Key function handed to the filter: extract the wrapped value.
        return dummy.get()

    dummy_filter = bloom_filter.BloomFilter(key=dummy_key)
    dummies = [Dummy(3), Dummy('a'), Dummy('b')]
    for dummy in dummies:
        dummy_filter += dummy
    for dummy in dummies:
        if dummy not in dummy_filter:
            return False  # failed to insert

    other_dummy_filter = bloom_filter.BloomFilter(key=dummy_key)
    other_dummy_filter += Dummy('hello world')

    # Union: elements from both filters must now be present.
    dummy_filter |= other_dummy_filter
    for dummy in dummies + [Dummy('hello world')]:
        if dummy not in dummy_filter:
            return False

    # Intersection: only 'hello world' should survive.
    dummy_filter &= other_dummy_filter
    for dummy in dummies:
        if dummy in dummy_filter:
            return False
    if Dummy('hello world') not in dummy_filter:
        return False
    return True
def or_test():
    """Test the | operator (bloom-filter union).

    Builds two overlapping filters ({a,b,c} and {b,c,d}), unions them in
    place, and verifies that all four characters are present and a fifth
    is not.  Returns True when every check passes.

    Fixes: every error message was missing its trailing newline;
    `not 'x' in` rewritten as the idiomatic `'x' not in`.
    """
    all_good = True

    abc = bloom_filter.BloomFilter(max_elements=100, error_rate=0.01)
    for character in ['a', 'b', 'c']:
        abc += character

    bcd = bloom_filter.BloomFilter(max_elements=100, error_rate=0.01)
    for character in ['b', 'c', 'd']:
        bcd += character

    # NOTE: |= mutates in place, so abc_and_bcd aliases abc after this line.
    abc_and_bcd = abc
    abc_and_bcd |= bcd

    if 'a' not in abc_and_bcd:
        sys.stderr.write('a not in abc_and_bcd, but should be\n')
        all_good = False
    if 'b' not in abc_and_bcd:
        sys.stderr.write('b not in abc_and_bcd, but should be\n')
        all_good = False
    if 'c' not in abc_and_bcd:
        sys.stderr.write('c not in abc_and_bcd, but should be\n')
        all_good = False
    if 'd' not in abc_and_bcd:
        sys.stderr.write('d not in abc_and_bcd, but should be\n')
        all_good = False
    if 'e' in abc_and_bcd:
        sys.stderr.write('e in abc_and_bcd, but should not be\n')
        all_good = False

    return all_good
def test_contains():
    """Ensure the bloom filter returns True/False appropriately for
    contains operations after a pair of inserts.

    Idiom fix: ``assert expr == True/False`` replaced with plain
    truthiness asserts.
    """
    bloomFilter = bloom_filter.BloomFilter(100000, 0.03)
    # insert() returns a truthy success flag.
    assert bloomFilter.insert("GCGTTT")
    assert bloomFilter.insert("AAACTG")
    # Never-inserted items must not be reported as present.
    assert not bloomFilter.contains("GT")
    assert not bloomFilter.contains("GTATCGGGT")
    # Inserted items must be reported as present.
    assert bloomFilter.contains("GCGTTT")
    assert bloomFilter.contains("AAACTG")
def setup_bloom_filter(num_prod):
    """Configure module-level bloom-filter parameters and build a filter.

    Sets the global false-positive probability and hash count on the ``bf``
    module, seeds its RNG with a random seed, and builds a filter sized for
    ``num_prod`` producers.

    Fixed: the constructed filter was previously assigned to a local and
    discarded; it is now returned so callers can use it (callers ignoring
    the return value are unaffected).
    """
    bf.gb_fp_prob = 0.001
    bf.gb_hash_count = 4
    seed_init = random.randint(1, 10000)
    bf.init_seed(seed_init)
    # print("Seed: ", bf.gb_rand)
    bf_sample = bf.BloomFilter(num_prod)
    return bf_sample
def __init__(self, distance=2, bloomfilter=None, filter_dothtml=True,
             ignore_param_value=True):
    """Initialize the deduplicator.

    distance           -- simhash distance threshold ``k``
    bloomfilter        -- optional pre-built BloomFilter; a fresh default
                          filter is created when None
    filter_dothtml     -- stored flag (used elsewhere in the class)
    ignore_param_value -- stored flag (used elsewhere in the class)

    Fixed: the non-None branch previously assigned the *module*
    ``bloom_filter`` instead of the ``bloomfilter`` argument.
    """
    self.distance = distance
    self.filter_dothtml = filter_dothtml
    self.ignore_param_value = ignore_param_value
    self._simindex = simhash.SimhashIndex([], k=distance)
    self.bfilter = bloom_filter.BloomFilter() if not bloomfilter else bloomfilter
def create_merged_bf_pool(ind_prod, num_of_producers, collected_candidates_ids, correct_update_ids, lj_prod_bfs, merging_threshold):
    """Merge the candidate bloom filters collected by producer ``ind_prod``.

    When a majority of the collected candidate IDs is found among the
    correct update IDs, the per-candidate filters are merged additively and
    every producer ID present at least ``merging_threshold`` times is added
    to the returned filter.  Otherwise an empty filter is returned.
    """
    collected_candidates_bf = blf.BloomFilter(num_of_producers)
    collected_candidates_id = collected_candidates_ids[ind_prod, :]
    isMajFound = is_majority_found(collected_candidates_id, correct_update_ids, len(collected_candidates_id))
    if isMajFound:
        #print("producer ", ind_prod, "has a majority ")
        collected_correct_candidates_id = set(collected_candidates_id).intersection(correct_update_ids)
        # Loop over the ID of each producer. For each producer, check in how many BFs it is found in these IDs.
        # If above threshold, add the ID of the producer to a new BF.
        '''for id_prod in range(num_of_producers):
            count_id = 0
            for id_cand in collected_correct_candidates_id:
                id_bf = lj_prod_bfs[id_cand]
                if id_bf.check(id_prod):
                    count_id += 1
            if count_id > merging_threshold:
                collected_candidates_bf.add(id_prod, False)
                #print("found producer ", id_prod, "in ", count_id, " BFs")
        '''
        # alternatively, merge the BFs with a count:
        collected_quantitities_merged_bf = blf.BloomFilter(num_of_producers)
        for id_cand in collected_correct_candidates_id:
            collected_quantitities_merged_bf.merge_additive(lj_prod_bfs[id_cand])
        for id_prod in range(num_of_producers):
            if collected_quantitities_merged_bf.check_additive(id_prod, merging_threshold):
                collected_candidates_bf.add(id_prod, False)
        #ind_prod == 5 and
        #if id_prod not in correct_update_ids:
        #    print("Passing a wrong id (",id_prod,") into merged BF")
        #collected_candidates_bf.add_additive(id_prod)
        #print("The BF or producer ",ind_prod," has ", sum(collected_candidates_bf.bit_array), " bits to 1")
    #print("Producer ",ind_prod, " BF merged : ", collected_quantitites_merged_bf.num_array)
    return collected_candidates_bf
def create_bloom_filter(sketch_config, filter_stats):
    """Build the global bloom filter from ``read_list`` and record stats.

    When ``sketch_config.k == 0`` whole read lines are inserted; otherwise
    every k-mer of each line is inserted.  Insertion stops at the first
    failed insert.  Throughput is sampled once per tenth of the filter's
    expected capacity.

    Results written into ``filter_stats``: item count, construction speed,
    load factor, total size, bits per item, and throughput samples.

    Idiom fix: ``insert(...) == False`` comparisons replaced with ``not``.
    """
    global bloomFilter
    insertion_tput_records = []
    # print("Creating the sketch. This might take a while ...")
    bloomFilter = bloom_filter.BloomFilter(sketch_config.expected_items,
                                           sketch_config.fp_prob)
    items = 0
    load_factor_step_size = bloomFilter.expected_num / 10
    step = 1
    start = time.time()
    if sketch_config.k == 0:
        # Whole-line mode.
        t1 = time.time()
        for read in read_list:
            if not bloomFilter.insert(read.line):
                break
            items += 1
            if items >= load_factor_step_size * step:
                insertion_tput_records.append(
                    load_factor_step_size / (time.time() - t1))
                step += 1
                t1 = time.time()
    else:
        # k-mer mode: insert every substring of length k per read.
        failed = False
        r = 0
        t1 = time.time()
        while not failed and r < len(read_list):
            for i in range(len(read_list[r].line) - sketch_config.k):
                if not bloomFilter.insert(
                        read_list[r].line[i:i + sketch_config.k]):
                    failed = True
                    break
                items += 1
                if items >= load_factor_step_size * step:
                    insertion_tput_records.append(
                        load_factor_step_size / (time.time() - t1))
                    step += 1
                    t1 = time.time()
            r += 1
    end = time.time()
    filter_stats["items"] = items
    filter_stats["constr_speed"] = items / (end - start)
    filter_stats["load_factor"] = items / bloomFilter.expected_num
    filter_stats["total_size"] = bloomFilter.get_size()
    filter_stats["bpi"] = (filter_stats["total_size"] / items) * 8
    filter_stats["insertion_tput"] = insertion_tput_records
def create_list_of_bfs(num_of_producers, collected_quantities_ids, correct_producers_id):
    """Build one bloom filter per producer from its collected quantity IDs.

    For each producer row, the filter is populated only when a majority of
    the collected IDs is found among the correct producer IDs; in that case
    the intersection of the two ID sets is inserted.  Returns the list of
    filters (one per producer, possibly empty).
    """
    filters = []
    for row in range(num_of_producers):
        ids_for_row = collected_quantities_ids[row, :]
        producer_filter = blf.BloomFilter(num_of_producers)
        if is_majority_found(ids_for_row, correct_producers_id, len(ids_for_row)):
            verified_ids = set(ids_for_row).intersection(correct_producers_id)
            for verified in verified_ids:
                producer_filter.add(verified, False)
        filters.append(producer_filter)
    return filters
def bloom_minimum_spanning_tree(graph_obj, starting_vertex, return_bit_graph=False):
    """Prim's minimum-spanning-tree using a bloom filter as the visited set.

    graph_obj        -- object exposing ``.graph`` (adjacency dict) and
                        ``.n_edges``
    starting_vertex  -- vertex to grow the tree from
    return_bit_graph -- when True, also return the edge bitmap

    Returns (total_cost, filter internals[, edge_map]).

    NOTE(review): ``probabilistic_contains`` implies the visited check can
    yield false positives, so vertices may occasionally be skipped and the
    resulting tree/cost may be approximate — confirm this is acceptable.
    """
    print("Starting Bloom Filter MST...")
    graph = graph_obj.graph
    mst = defaultdict(set)
    visited = bloom_filter.BloomFilter(len(
        graph.keys()))  # Replace set w/ Bloom
    edge_map = bitarray(graph_obj.n_edges)
    edge_map.setall(0)
    # Seed the heap with every edge leaving the start vertex;
    # cost tuples are (weight, edge index).
    edges = [(cost[0], cost[1], starting_vertex, to)
             for to, cost in graph[starting_vertex].items()]
    heapq.heapify(edges)
    total_cost = 0
    while edges:
        cost, edge_idx, frm, to = heapq.heappop(edges)
        if not visited.probabilistic_contains(str(to)):
            visited.add(str(to))
            total_cost += cost
            mst[frm].add(to)
            edge_map[edge_idx] = 1
            # Push all edges out of the newly added vertex.
            for to_next, cost in graph[to].items():
                if not visited.probabilistic_contains(str(to_next)):
                    heapq.heappush(edges, (cost[0], cost[1], to, to_next))
    edge_map_space = sys.getsizeof(edge_map)
    bf_space = visited.memory_used
    print(
        "Bloom Filter Space: " + str(bf_space),
        "Bloom Filter Filled: " + str(visited.percentage_filled),
        "Graph storage space: " + str(edge_map_space),
        "Total storage space: " + str(bf_space + edge_map_space),
    )
    print()
    if not return_bit_graph:
        return total_cost, visited.get_internals()
    else:
        return total_cost, visited.get_internals(), edge_map
def make_bfs(num_of_producers, prop_bf):
    """Create one bloom filter per producer, each filled with a random
    sample of the "correct" producer IDs.

    num_of_producers -- number of filters to create (and filter sizing)
    prop_bf          -- proportion used both for the correct-ID pool size
                        and for the per-filter sample size

    Fixed: every list entry previously aliased one shared BloomFilter
    instance, so all "per-producer" filters accumulated the same bits.
    Each producer now gets its own filter.
    """
    seed_init = random.randint(1, 10000)
    bf.init_seed(seed_init)
    list_of_bfs = []
    num_prod_in_list = int(num_of_producers * prop_bf)
    num_item_added_to_bf = int(num_prod_in_list * prop_bf)
    list_of_corr_prod = [i for i in range(num_prod_in_list)]
    for _ in range(num_of_producers):
        prod_list = random.sample(list_of_corr_prod, num_item_added_to_bf)
        prod_bf = bf.BloomFilter(num_of_producers)  # fresh filter per producer
        for j in prod_list:
            prod_bf.add(j, False)
        list_of_bfs.append(prod_bf)
    return list_of_bfs
def test_correctness(self):
    """Exercise add/count/contains/remove on a small filter and log results.

    NOTE(review): this only logs the observed behaviour; it makes no
    assertions, so it cannot fail on incorrect results.
    """
    fileh = self.init_logging('correctness', logging.DEBUG)
    b = bloom_filter.BloomFilter(capacity=100)
    added = b.add('Hello')
    logging.info(f'added Hello: {added}')
    # Second add of the same value: log whether it reports as added again.
    added = b.add('Hello')
    logging.info(f'added Hello: {added}')
    logging.info(f'b.count: {b.count}')
    logging.info(f'Hello in b: {"Hello" in b}')
    logging.info(f'Hola in b: {"Hola" in b}')
    # Removing an absent value vs. a present one.
    logging.info(f'remove Hola: {b.remove("Hola")}')
    logging.info(f'remove Hello: {b.remove("Hello")}')
    logging.info(f'b.count: {b.count}')
    fileh.close()
def itterate_bfs(num_of_producers, num_runs, prop_bf, fp_rate, hash_count):
    """Estimate the bloom-filter false-positive count over repeated probes.

    Configures the ``bf`` module's fp probability and hash count, fills one
    filter with the "correct" producer IDs, then probes it in parallel with
    IDs that were never inserted, counting hits (false positives).
    Returns the total false-positive count.
    """
    #num_runs = 200
    seed_init = random.randint(1, 10000)
    bf.init_seed(seed_init)
    bf.gb_fp_prob = fp_rate
    bf.gb_hash_count = hash_count
    global_bf = bf.BloomFilter(num_of_producers)
    num_prod_in_list = int(num_of_producers * prop_bf)
    fp_count = 0
    num_false_prod = num_of_producers - num_prod_in_list
    list_of_corr_prod = [i for i in range(num_prod_in_list)]
    for j in list_of_corr_prod:
        global_bf.add(j, False)
    print("Run:", num_of_producers, " producers, with ", num_false_prod, " wrong prod for ", num_runs, "cycles, BF = [", fp_rate, ", ", hash_count, ", ", global_bf.size, "]")
    # Probe with IDs >= num_of_producers, i.e. values never inserted,
    # fanning the work out across all CPU cores.
    pool = mp.Pool(mp.cpu_count())
    fp_counts = pool.starmap(check_id_bf, [(num_of_producers + i, global_bf, seed_init) for i in range(num_runs * num_false_prod)])
    pool.close()
    fp_count = sum(fp_counts)
    '''for m in range(num_runs*num_false_prod):
        #bad_list = random.sample(range(100000),num_false_prod)
        for l in bad_list:
            if global_bf.check(random.randint(1,100000)) == True:
                fp_count += 1
    '''
    # sum of list fp_count
    print("False Positive ", fp_count, " --> <", fp_count / num_runs, ">")
    return fp_count
def __init__(self, mem_dict, file_name, compresstype):
    """Write a sorted key/value mapping to data + index files on disk.

    mem_dict     -- dict, or pre-sorted list of (key, value) pairs
    file_name    -- base path; ``_data.dat``/``_idx.dat`` suffixes are added
    compresstype -- 0: plain text, 1: per-record zlib, 2: whole-file gzip

    A bloom filter of the keys is built alongside, and the index file maps
    each key to its byte offset in the data file.

    Fixes: converted Python-2 ``print`` statements to ``print()`` calls
    (the rest of the file uses Python-3 f-strings) and both output files
    are now closed via ``try/finally`` (they previously leaked).

    NOTE(review): with compresstype 1 or 2, ``zlib.compress``/``gzip`` in
    'wb' mode require ``bytes`` while ``string`` is ``str`` under Python 3
    — confirm the intended runtime or encode at the boundary.
    """
    self.file_name = file_name
    self.compresstype = compresstype
    self.bf = bloom_filter.BloomFilter(4)
    if isinstance(mem_dict, dict):
        mem_list = sorted(mem_dict.items())
    else:
        mem_list = mem_dict
    if self.compresstype == 2:
        fp_data = gzip.open(self.file_name + '_data.dat.gz', 'wb')
        fp_index = gzip.open(self.file_name + '_idx.dat.gz', 'wb')
    else:
        fp_data = open(self.file_name + '_data.dat', 'w')
        fp_index = open(self.file_name + '_idx.dat', 'w')
    try:
        next_offset = 0
        for key, value in mem_list:
            self.bf.update(key)
            string = key + "\t" + value + "\n"
            if self.compresstype == 1:
                current_item = zlib.compress(string, 7)
            else:
                current_item = string
            fp_data.write(current_item)
            fp_index.write(key + "\t" + str(next_offset) + '\n')
            next_offset += len(current_item)
            if platform.system() == "Windows" and self.compresstype == 0:
                next_offset += 1  # account for CRLF line endings on Windows
    finally:
        fp_data.close()
        fp_index.close()
    if COMPRESS_TEST:
        if compresstype == 2:
            print("file size:", os.stat(self.file_name + "_data.dat.gz").st_size)
        else:
            print("file size:", os.stat(self.file_name + "_data.dat").st_size)
def _test(description, values, trials, error_rate, probe_bitnoer=None, filename=None):
    # pylint: disable=R0913,R0914
    # R0913: We want a few arguments
    # R0914: We want some local variables too.  This is just test code.
    """Some quick automatic tests for the bloom filter class.

    description   -- label printed with the run
    values        -- object exposing generator()/length()/within()
    trials        -- number of random non-member probes
    error_rate    -- target false-positive rate (also the pass threshold)
    probe_bitnoer -- probe-bit strategy; defaults to get_bitno_lin_comb
    filename      -- optional backing file for the filter

    Returns True when all members are found and the observed false-positive
    rate does not exceed ``error_rate``.
    """
    if not probe_bitnoer:
        probe_bitnoer = bloom_filter.get_bitno_lin_comb
    all_good = True
    divisor = 100000
    bloom = bloom_filter.BloomFilter(
        max_elements=trials * 2,
        error_rate=error_rate,
        probe_bitnoer=probe_bitnoer,
        filename=filename,
        start_fresh=True,
    )
    message = '\ndescription: %s num_bits_m: %s num_probes_k: %s\n'
    filled_out_message = message % (
        description,
        bloom.num_bits_m,
        bloom.num_probes_k,
    )
    sys.stdout.write(filled_out_message)
    print('starting to add values to an empty bloom filter')
    for valueno, value in enumerate(values.generator()):
        reverse_valueno = values.length() - valueno
        if reverse_valueno % divisor == 0:
            print('adding valueno %d' % reverse_valueno)
        bloom.add(value)
    print('testing all known members')
    include_in_count = sum(include in bloom for include in values.generator())
    if include_in_count == values.length():
        # Good
        pass
    else:
        sys.stderr.write('Include count bad: %s, %d\n' % (include_in_count, values.length()))
        all_good = False
    print('testing random non-members')
    false_positives = 0
    for trialno in my_range(trials):
        if trialno % divisor == 0:
            sys.stderr.write('trialno countdown: %d\n' % (trials - trialno))
        # Draw random 5-character candidates until one is a true non-member,
        # then test it exactly once.
        while True:
            candidate = ''.join(random.sample(CHARACTERS, 5))
            # If we accidentally found a member, try again
            if values.within(candidate):
                continue
            if candidate in bloom:
                # print 'We erroneously think %s is in the filter' % candidate
                false_positives += 1
            break
    actual_error_rate = float(false_positives) / trials
    if actual_error_rate > error_rate:
        sys.stderr.write('%s: Too many false positives: actual: %s, expected: %s\n' % (
            sys.argv[0],
            actual_error_rate,
            error_rate,
        ))
        all_good = False
    return all_good
def test_bloom_filter():
    """Unit tests for BloomFilter class.

    Runs the functional tests (_test, and_test, or_test, key_test); when
    invoked with ``--performance-test`` it also benchmarks a range of
    element counts and backends, caching timings in a dbm database.
    Exits with status 1 when any test fails.

    NOTE(review): ``anydbm`` is the Python-2 name of this module (``dbm``
    in Python 3) — confirm the intended interpreter.
    """
    if sys.argv[1:] == ['--performance-test']:
        performance_test = True
    else:
        performance_test = False
    all_good = True
    all_good &= _test('states', States(), trials=100000, error_rate=0.01)
    all_good &= _test('random', Random_content(), trials=10000, error_rate=0.1)
    all_good &= _test('random', Random_content(), trials=10000, error_rate=0.1,
                      probe_bitnoer=bloom_filter.get_bitno_seed_rnd)
    filename = 'bloom-filter-rm-me'
    all_good &= _test('random', Random_content(), trials=10000, error_rate=0.1,
                      filename=filename)
    all_good &= and_test()
    all_good &= or_test()
    all_good &= key_test()
    if performance_test:
        sqrt_of_10 = math.sqrt(10)
        # for exponent in range(5): # this is a lot, but probably not unreasonable
        for exponent in range(19):  # this is a lot, but probably not unreasonable
            elements = int(sqrt_of_10 ** exponent + 0.5)
            for filename in [None, 'bloom-filter-rm-me', ('bloom-filter-rm-me', 768 * 2 ** 20), ('bloom-filter-rm-me', -1)]:
                description = give_description(filename)
                key = '%s %s' % (description, elements)
                database = anydbm.open('performance-numbers', 'c')
                # Skip combinations already benchmarked.
                if key in database.keys():
                    database.close()
                    continue
                # Skip combinations known to be impractically slow/large.
                if elements >= 100000000 and description == 'seek':
                    continue
                if elements >= 100000000 and description == 'mmap':
                    continue
                if elements >= 1000000000 and description == 'array':
                    continue
                time0 = time.time()
                all_good &= _test(
                    'evens %s elements: %d' % (give_description(filename), elements),
                    Evens(elements),
                    trials=elements,
                    error_rate=1e-2,
                    filename=filename,
                )
                time1 = time.time()
                delta_t = time1 - time0
                # file_ = open('%s.txt' % description, 'a')
                # file_.write('%d %f\n' % (elements, delta_t))
                # file_.close()
                database = anydbm.open('performance-numbers', 'c')
                database[key] = '%f' % delta_t
                database.close()
    # test prob count ok
    bloom = bloom_filter.BloomFilter(1000000, error_rate=.99)
    all_good &= bloom.num_probes_k == 1
    if not all_good:
        sys.stderr.write('%s: One or more tests failed\n' % sys.argv[0])
        sys.exit(1)
def bench_bloomfilter(file_path):
    """Benchmark bloom-filter add/exists/clear over a word-list file.

    Loads one word per line, sizes the filter for a 1% false-positive
    probability, then times insertion, lookup, and clearing, reports peak
    memory via tracemalloc, and finally measures the observed error rate
    while re-filling the cleared filter.
    """
    print("---------------- Bloom filters benchmark ----------------")
    words = []
    with open(file_path) as f:
        words = f.read().splitlines()
    print(f"Loaded: {len(words)} words")
    desired_false_prob = 0.01
    size = bloom_filter.optimal_bit_size(desired_false_prob, len(words))
    hash_count = bloom_filter.optimal_hash_count(size, len(words))
    print(f"{desired_false_prob} error => {size} bits and {hash_count} hash")
    tracemalloc.start()
    print("Creating the bloom filter data structure")
    start_time = time.time_ns()
    bf = bloom_filter.BloomFilter(size, hash_count)
    print(f"Adding {len(words)} elements")
    for word in words:
        bf.add(word)
    end_time = time.time_ns()
    elapsed_time = end_time - start_time
    print(
        f"Time to add all elements: {elapsed_time}ns ({elapsed_time / 1000000}ms)"
    )
    print(f"Looking up every element")
    start_time = time.time_ns()
    # Every inserted word must be reported present (no false negatives).
    for word in words:
        if bf.exists(word) == False:
            print(f"Error: {word} wasn't found")
    end_time = time.time_ns()
    elapsed_time = end_time - start_time
    print(
        f"Time to know if the elements were present: {elapsed_time}ns ({elapsed_time / 1000000}ms)"
    )
    current, peak = tracemalloc.get_traced_memory()
    tracemalloc.stop()
    print(
        f"Current memory usage is {current / 10**6}MB; Peak was {peak / 10**6}MB"
    )
    print(f"Clearing the bloomfilter")
    start_time = time.time_ns()
    bf.clear()
    end_time = time.time_ns()
    elapsed_time = end_time - start_time
    print(
        f"Clearing the bloomfilter took: {elapsed_time}ns ({elapsed_time / 1000000}ms)"
    )
    print(f"Validating the error count")
    # After clear() nothing has been re-inserted yet, so a positive
    # exists() before a word's own add() counts as a false positive.
    errorCount = 0
    for word in words:
        if bf.exists(word) == True:
            errorCount += 1
        bf.add(word)
    print(
        f"There was {errorCount} error(s) [error rate: {errorCount / len(words)}]"
    )
import bloom_filter

# Smoke-test script: exercise add()/exists() on a tiny bloom filter.
print("---------------- Bloom filters test ----------------")
print("Creating the bloom filter data structure")
# Presumably (size_in_bits, hash_count) — matches the constructor usage in
# the benchmark code; TODO confirm.
bf = bloom_filter.BloomFilter(8, 5)
val1 = "Monkey"
val2 = "Computer"
print(f"Making sure that the word '{val1}' isn't in the bloom filter yet")
print(f"'{val1}' exists? {bf.exists(val1)}")
print(f"\nAdding the word '{val1}' to the bloom filter")
bf.add(val1)
print(f"\nMaking sure the word '{val1}' is present in the bloom filter")
print(f"'{val1}' exists? {bf.exists(val1)}")
print(f"\nMaking sure the word'{val2}' isn't present in the bloom filter")
print(f"'{val2}' exists? {bf.exists(val2)}")
def check_bloom_filter(self, bf_public_key, is_malicious=False):
    """Build a bloom filter from a public key.

    NOTE(review): the constructed filter is never queried, stored, or
    returned, and ``is_malicious`` is ignored — this looks incomplete;
    confirm intent (compare the sibling implementation that adds the key
    and updates the trust table).
    """
    bf = bloom_filter.BloomFilter(bf_public_key, k,
                                  int(self.number_of_nodes() * prob))
def __init__(self, initial_nodes=0, ind=None):
    """Build the simulation network.

    Creates member nodes, promotes every ``miners_rate``-th node to a miner
    (copying its key pair and assigning money by index), seeds trust tables
    via per-miner bloom filters, and initializes the blockchain / control
    blockchain bookkeeping state.

    initial_nodes -- number of member nodes to create
    ind           -- run index (only used by the commented-out
                     betweenness-centrality dump)
    """
    Graph.__init__(self)
    self.removed = []
    self.G = nx.Graph()
    self.K = nx.Graph()
    self.member_nodes = self.make_nodes2(initial_nodes, malicious_rate, self)
    self.miners = []
    random.shuffle(self.member_nodes)
    array_to_del = []
    # Promote a fraction of the nodes to miners, preserving their keys.
    # The first 23 miners get little money; the rest are wealthy.
    for i in range(initial_nodes // miners_rate):
        node = self.member_nodes[i]
        if (self.member_nodes[i].is_malicious):
            if i < 23:
                miner = Miner(self, is_miner=True, is_malicious=True, min_int=i, money=randint(1, 200))
            else:
                miner = Miner(self, is_miner=True, is_malicious=True, min_int=i, money=randint(100000, 5000000))
        else:
            if i < 23:
                miner = Miner(self, is_miner=True, is_malicious=False, min_int=i, money=randint(1, 200))
            else:
                miner = Miner(self, is_miner=True, is_malicious=False, min_int=i, money=randint(100000, 5000000))
        miner.set_private_key(node.get_private_key())
        miner.public_key = node.public_key
        self.miners.append(miner)
        array_to_del.append(node)
    #print("Minners added!")
    self.G.add_nodes_from(self.miners)
    for node in array_to_del:
        self.member_nodes.remove(node)
    self.everything = []
    for miner in self.miners:
        self.everything.append(miner)
    #for node in self.member_nodes:
    #    self.everything.append(node)
    random.shuffle(self.everything)
    array = []
    array2 = []
    count = 0
    # bloomfilter generate: each miner's public-key modulus (as a bit
    # string) seeds a filter; nodes that test positive grant that miner
    # an initial trust grade.
    for miner in self.miners:
        # print(str(miner.public_key.n))
        array = bin(int((str(miner.public_key.n))))[2:]
        bf = bloom_filter.BloomFilter(array, k, int(self.number_of_nodes() * prob))
        if (miner.is_malicious):
            count += 1
        true = 0  ##
        for node in self.everything:
            if (miner != node):
                if (bf.check(str(node.public_key.n))):
                    node.trust_table[miner.public_key.n] = entrance_grade
                    true += 1
        array2.append(true)
    # Betweenness of judges
    """
    self.K.add_nodes_from(self.everything)
    # Keys graph generating
    for i in range(len(self.everything)):
        for j in range(i+1, len(self.everything)):
            for key in self.everything[i].trust_table.keys():
                if key in self.everything[j].trust_table.keys():
                    self.K.add_edge(self.everything[i],
                                    self.everything[j], weight=1)
                    break
    bc = nx.betweenness_centrality(self.K, normalized=False)
    bcs = []
    for node in self.everything:
        bcs.append(bc[node])
    #nx.draw(self.K, cmap=plt.get_cmap('jet'))
    #plt.show()
    with open("bc_judges/bc_judges_500-{}.json".format(ind),"w") as outfile:
        json.dump(bcs ,outfile)
    """
    self.insertion_queue = [
    ]  # First element = (is node signing), second = Node()
    # transactions
    self.identify = 0
    self.transactions = []
    self.validated_transactions = []
    self.miner_numbers = len(self.miners)
    self.all_nodes = self.number_of_nodes()
    # block-closing flow
    self.block_closed_flow = []
    # variables related to the blockchain flow
    self.new_blocks_flow = []
    self.bc_turn = 1
    self.id_block_bc = 0
    self.blocks_from_blockchain_json = []
    # which miner's turn it is to close a block on the control blockchain
    self.bc_control_turn = self.calculate_bc_turn(0)
    #print("first one calculated")
    #sys.exit()
    # control blocks
    self.new_blocks_flow_control = []
    # json files
    self.emiss_trans = []
    self.block_mined_json = []
    # new nodes array
    self.nodes_approved = []
    self.miners_approved = []
    self.keys_tuple = []
    # ids
    self.id_exp_trans = 0
    self.id_block_control = 0
    self.trans_in_blocks_id = 0
    # expulsion transactions
    self.exp_transacts = {}
    for miner in self.miners:
        self.exp_transacts[miner.public_key.n] = [0, [], False, []]
    # validated expulsion transactions
    self.exp_transacts_validated = []
    # transactions
    self.final_exp_trans = {}
    # blockchain blocks
    self.generated_block = Block()
    # control blocks
    self.generated_block_control = Block()
    # .json files
    self.blocks_from_blockchain_json = []
    self.exp_transacts_json = []
    self.final_exp_transacts_json = []
    self.blocks_from_controlbc_json = []
    self.expss = 0
    self.data = {}
    self.iden = 0
def __init__(self, cap=100000, error_rate=0.0001):
    """Wrap a bloom filter sized for ``cap`` elements at ``error_rate``."""
    backing = bloom_filter.BloomFilter(
        max_elements=cap,
        error_rate=error_rate,
    )
    self.bfilter = backing
def create_bf_from_list(num_of_producers, correct_producers_id):
    """Build a bloom filter containing every correct producer ID.

    num_of_producers     -- filter sizing parameter
    correct_producers_id -- iterable of IDs to insert

    Idiom fix: the loop variable previously shadowed the builtin ``id``
    (and the local ``bf`` shadowed the module name used elsewhere).
    """
    producers_bf = blf.BloomFilter(num_of_producers)
    for producer_id in correct_producers_id:
        producers_bf.add(producer_id, False)
    return producers_bf
def __init__(self, initial_nodes=0):
    """Build the simulation network (mid-size variant).

    Creates member nodes, promotes every ``miners_rate``-th node to a miner
    (copying its key pair), seeds trust tables via per-miner bloom filters,
    and initializes the blockchain / control blockchain bookkeeping state.
    """
    Graph.__init__(self)
    self.member_nodes = self.make_nodes2(initial_nodes, malicious_rate, self)
    self.miners = []
    random.shuffle(self.member_nodes)
    array_to_del = []
    # Promote a fraction of the nodes to miners, preserving their keys.
    for i in range(initial_nodes // miners_rate):
        node = self.member_nodes[i]
        if (self.member_nodes[i].is_malicious):
            miner = Miner(self, is_miner=True, is_malicious=True)
        else:
            miner = Miner(self, is_miner=True, is_malicious=False)
        miner.set_private_key(node.get_private_key())
        miner.public_key = node.public_key
        self.miners.append(miner)
        array_to_del.append(node)
    for node in array_to_del:
        self.member_nodes.remove(node)
    self.everything = []
    for miner in self.miners:
        self.everything.append(miner)
    for node in self.member_nodes:
        self.everything.append(node)
    random.shuffle(self.everything)
    array = []
    array2 = []
    count = 0
    # bloomfilter generate: each miner's public-key modulus (as a bit
    # string) seeds a filter; nodes that test positive grant that miner
    # an initial trust grade.
    for miner in self.miners:
        # print(str(miner.public_key.n))
        array = bin(int((str(miner.public_key.n))))[2:]
        bf = bloom_filter.BloomFilter(array, k, int(self.number_of_nodes() * prob))
        if (miner.is_malicious):
            count += 1
        true = 0  ##
        for node in self.everything:
            if (miner != node):
                if (bf.check(str(node.public_key.n))):
                    node.trust_table[miner.public_key.n] = entrance_grade
                    true += 1
        array2.append(true)
    self.insertion_queue = [
    ]  # First element = (is node signing), second = Node()
    # transactions
    self.identify = 0
    self.transactions = []
    self.validated_transactions = []
    self.miner_numbers = len(self.miners)
    self.all_nodes = self.number_of_nodes()
    # block-closing flow
    self.block_closed_flow = []
    # variables related to the blockchain flow
    self.new_blocks_flow = []
    self.bc_turn = 0
    self.id_block_bc = 0
    self.blocks_from_blockchain_json = []
    # which miner's turn it is to close a block on the control blockchain
    self.bc_control_turn = 0
    # control blocks
    self.new_blocks_flow_control = []
    # json files
    self.emiss_trans = []
    self.block_mined_json = []
    # new nodes array
    self.nodes_approved = []
    self.miners_approved = []
    self.keys_tuple = []
    # ids
    self.id_exp_trans = 0
    self.id_block_control = 0
    self.trans_in_blocks_id = 0
    # expulsion transactions
    self.exp_transacts = {}
    for miner in self.miners:
        self.exp_transacts[miner.public_key.n] = [0, [], False, []]
    # validated expulsion transactions
    self.exp_transacts_validated = []
    # transactions
    self.final_exp_trans = {}
    # blockchain blocks
    self.generated_block = Block()
    # control blocks
    self.generated_block_control = Block()
    # .json files
    self.blocks_from_blockchain_json = []
    self.exp_transacts_json = []
    self.final_exp_transacts_json = []
    self.blocks_from_controlbc_json = []
    self.expss = 0
    self.data = {}
    self.iden = 0
def __init__(self, initial_nodes=0):
    """Build the simulation network (oldest variant).

    Creates member nodes and miners via factory helpers, seeds trust
    tables via per-miner bloom filters, and initializes the blockchain /
    control blockchain bookkeeping state (list-based here, unlike the
    dict-based newer variants).
    """
    Graph.__init__(self)
    self.member_nodes = self.make_nodes(initial_nodes, self)
    self.miners = self.make_miners(self.member_nodes, round(initial_nodes / miners_rate), self, malicious_rate)
    random.shuffle(self.miners)
    self.everything = []
    for miner in self.miners:
        self.everything.append(miner)
    for node in self.member_nodes:
        self.everything.append(node)
    random.shuffle(self.everything)
    array = []
    array2 = []
    # Each miner's public-key modulus (as a bit string) seeds a filter;
    # nodes that test positive grant that miner an initial trust grade.
    for miner in self.miners:
        array = bin(int((str(miner.public_key.n))))[2:]
        bf = bloom_filter.BloomFilter(array, k, int(self.number_of_nodes() * prob))
        true = 0  ##
        for node in self.everything:
            if (miner != node):
                if (bf.check(str(node.public_key.n))):
                    node.trust_table[miner.public_key.n] = entrance_grade
                    true += 1
        array2.append(true)
    self.insertion_queue = []  # First element = (is node signing), second = Node()
    # transactions
    self.identify = 0
    self.transactions = []
    self.validated_transactions = []
    self.miner_numbers = len(self.miners)
    self.all_nodes = self.number_of_nodes()
    # block-closing flow
    self.block_closed_flow = []
    # which miner's turn it is to generate a block on the blockchain
    self.vez = 0
    # which miner's turn it is to close a block on the control blockchain
    self.vez2 = 0
    # flow of new nodes
    # incoming blockchain blocks
    self.new_blocks_flow = []
    self.made_key = []
    # control blocks
    self.new_blocks_flow_control = []
    # json files
    self.emiss_trans = []
    self.block_mined_json = []
    # new nodes array
    self.nodes_approved = []
    self.miners_approved = []
    self.keys_tuple = []
    # ids
    self.id_block_bc = 0
    self.id_exp_trans = 0
    self.id_block_control = 0
    self.trans_in_blocks_id = 0
    # expulsion transactions
    self.exp_transacts = []
    # validated expulsion transactions
    self.exp_transacts_validated = []
    # transactions
    self.final_exp_trans = []
    # blockchain blocks
    self.generated_block = []
    # control blocks
    self.generated_block_control = []
    # .json files
    self.blocks_from_blockchain_json = []
    self.exp_transacts_json = []
    self.final_exp_transacts_json = []
    self.blocks_from_controlbc_json = []
def test_construction():
    """Check that a freshly built filter has the expected parameters."""
    built = bloom_filter.BloomFilter(100000, 0.03)
    actual = (built.expected_num, built.size, built.num_hashes)
    assert actual == (100000, 729844, 5)
def __init__(self, name, size=None, max_elements=1e4, error_rate=1e-8):
    """Named set backed by a bloom filter.

    name         -- label for this set
    size         -- accepted for interface compatibility; not used here
    max_elements -- filter capacity
    error_rate   -- target false-positive rate
    """
    self.name = name
    self.max_elements = max_elements
    self.error_rate = error_rate
    backing = bloom_filter.BloomFilter(max_elements=max_elements,
                                       error_rate=error_rate)
    self._set = backing
def test_insert():
    """Ensure an inserted item is subsequently reported as present.

    Idiom fix: ``assert expr == True`` replaced with plain truthiness
    asserts.
    """
    bloomFilter = bloom_filter.BloomFilter(100000, 0.03)
    assert bloomFilter.insert("GCGTTT")
    assert bloomFilter.contains("GCGTTT")
def reset(self):
    """Discard all elements by replacing the backing bloom filter."""
    fresh = bloom_filter.BloomFilter(
        max_elements=self.max_elements,
        error_rate=self.error_rate,
    )
    self._set = fresh
def check_bloom_filter(self, bf_public_key):
    """Decide whether this node acts as a "judge" for the given key.

    Builds a tiny filter, inserts the key's modulus, and — if this node's
    own public key tests positive — grants the key an initial trust grade
    of 6.0.

    NOTE(review): the filter is populated with ``str(bf_public_key.n)`` but
    queried with ``str(self.public_key)`` (no ``.n``); sibling code uses
    ``public_key.n`` consistently — confirm whether ``.n`` is missing here.
    """
    bf = bloom_filter.BloomFilter(5, 0.1)
    bf.add(str(bf_public_key.n))
    if (bf.check(str(self.public_key))):
        print("sou juiz desse cara")
        self.trust_table[bf_public_key.n] = 6.0