def getMixNodeList(self, source):
    """Ask the network for the current list of mix nodes.

    Builds an "all mixnodes" request for *source*, sends it synchronously
    through ``self.network_sender`` and decodes the JSON reply.

    :param source: identity handed to the request builder
                   (semantics defined by ``RequestCreator``).
    :return: the deserialized JSON response (parsed with ``json.loads``).
    """
    payload, destination = RequestCreator().get_all_mixnode_request(source)
    raw_reply = self.network_sender.send_data_wait(payload, destination)
    return json.loads(raw_reply.decode())
def handle_PIR(self, decrypted_msg, client_pk):
    """Answer a PIR query and route the encrypted reply back through the mix-net.

    Fetches the database answer for ``decrypted_msg``, encrypts it for the
    client, wraps it in a Sphinx SURB reply built from the ``nymtuple``
    carried inside the query, and forwards it to the first backward-path
    mix node.

    :param decrypted_msg: decrypted client request; must contain a
        'nymtuple' entry (the client's single-use reply block).
    :param client_pk: client public key used to encrypt the answer.
    """
    # Time spent between acceptance (self.t_accepted) and processing start.
    time_queued = time.perf_counter() - self.t_accepted
    log_info(">>>>> TIME QUEUED: {}".format(time_queued))
    t1 = time.perf_counter()
    print("TRYING TO FETCH")
    # Look up the PIR answer in the local database node.
    answer = self.dbnode.fetch_answer(decrypted_msg)
    print("ANSWER:", answer)
    reply = encode(answer)
    # Encrypt the serialized answer under the client's key, then serialize again.
    encrypted_reply = encode(self.dbnode.encrypt(reply, client_pk))
    # The nymtuple is the client-provided backward route; its first entry
    # identifies the first mix node of the reply path.
    nymtuple = decrypted_msg['nymtuple']
    first_node = decode(nymtuple[0])
    # Package the encrypted reply into a Sphinx single-use reply block.
    header, delta = package_surb(getGlobalSphinxParams(), nymtuple,
                                 encrypted_reply)
    # NOTE(review): the return value of get_mixnode_list() is discarded —
    # presumably called for its side effect of refreshing the node's list;
    # confirm, otherwise this line is dead work.
    self.dbnode.get_mixnode_list()
    json_data, dest = RequestCreator().post_msg_to_mix(
        {
            # first_node[1] holds the node's IP — TODO confirm tuple layout.
            'ip': first_node[1],
            'port': self.mixport
        }, {
            'header': header,
            'delta': delta
        })
    t2 = time.perf_counter()
    elapsed_time = (t2 - t1)
    log_info("TIME ELAPSED: {}".format(elapsed_time))
    # Fire-and-forget send of the packaged reply toward the first mix node.
    self.network_sender.send_data(json_data, dest)
def __init__(self, myid, mixnode, client, message_pool):
    """Initialize the worker thread.

    :param myid: numeric/string identifier for this thread.
    :param mixnode: the mix-node object this thread services.
    :param client: associated client handle.
    :param message_pool: shared pool of queued messages.
    """
    Thread.__init__(self)
    # Identity and collaborators handed in by the caller.
    self.id = myid
    self.client = client
    self.mixnode = mixnode
    self.message_pool = message_pool
    # Fresh networking helpers owned by this thread.
    self.request_creator = RequestCreator()
    self.network_sender = NetworkSender()
def handle_read(self):
    """Dispatch an incoming packet: mix-process it or register a client poll.

    Reads raw bytes from the superclass, unpickles them, and either runs the
    Sphinx ``process`` step (relaying, delivering to a DB, or caching a SURB
    reply) or records a polling client in the backlog.
    """
    data = super().handle_read()
    if data:
        # SECURITY: pickle.loads on data received from the network —
        # deserializing untrusted input with pickle allows arbitrary code
        # execution. Flagged for review; consider a safe format (e.g. JSON).
        data = pickle.loads(data)
        if data['type'] == RequestType.push_to_mix.value:
            start = time.time()
            operation = ''
            data = decode(data['payload'])
            header = data['header']
            delta = data['delta']
            # One Sphinx processing step; result[0] is a routing flag.
            result = self.mixnode.process(header, delta)
            if result[0] == Relay_flag:
                # Forward to the next mix node: queue the re-wrapped packet.
                flag, addr, header, delta = result
                json_data, dest = RequestCreator().post_msg_to_mix(
                    {'ip': addr, 'port': self.mixport},
                    {'header': header, 'delta': delta}
                )
                # Pool access is shared across threads — guard with the lock.
                with self.pool_lock:
                    self.mixnode.pool_item((json_data, dest))
                operation = '[RELAY_FLAG] pool'
            elif result[0] == Dest_flag:
                # Final hop: deliver the payload to the destination database.
                # Note: `dest` from the unpack is immediately replaced by the
                # serialized destination returned by post_msg_to_db.
                flag, msg, dest, _ = result
                json_data, dest = RequestCreator().post_msg_to_db(dest, msg)
                self.network_sender.send_data(json_data, dest)
                operation = '[DEST_FLAG] send'
            elif result[0] == Surb_flag:
                # SURB reply addressed to a local client: cache it until the
                # client polls for it.
                flag, dest, myid, delta = result
                msg = {'myid': myid, 'delta': delta}
                print("CACHING")
                self.mixnode.client_cache.setdefault(myid, []).append(msg)
                operation = '[SURB_FLAG] cache'
            end = time.time()
            # NOTE(review): fromtimestamp() is applied to a *duration*
            # (end - start), interpreting elapsed seconds as an epoch time;
            # works as an MM:SS formatter for short durations but is
            # timezone-dependent — confirm intent.
            timestamp = datetime.fromtimestamp(
                end - start).strftime('%M:%S')
            logger.log_info(
                '[TIME] MIX LISTENER {} TOOK {}'.format(operation, timestamp))
        elif data['type'] == RequestType.client_poll.value:
            # A client asks for pending replies: remember (id, socket) so the
            # cached SURBs can be pushed back later.
            client_id = data['id']
            with self.backlog_lock:
                self.mixnode.client_backlog.add((client_id, self.socket))
def prepare_sending_pk(public_key, server_config):
    """Build the request that publishes this node's public key to the key server.

    Probes the key server with a single ping first; on success, asks
    ``RequestCreator`` to build the publish request.

    :param public_key: sequence whose element [0] is the key id and
        element [2] the public-key material.
    :param server_config: dict with 'pkserver' (host/IP) and 'port' keys.
    :return: ``(json_data, destination)`` ready to send, or ``None`` when the
        server is unreachable or any error occurs (best-effort, errors are
        printed and swallowed as in the original contract).
    """
    # Local import: the reachability probe is only used here.
    import subprocess

    key_server_ip = server_config['pkserver']
    port = server_config['port']
    try:
        # Reachability probe. A list argv with shell=False (the default)
        # replaces the original os.system string concatenation, which was
        # vulnerable to shell injection via a hostile 'pkserver' value.
        probe = subprocess.run(
            ["ping", "-c", "1", key_server_ip],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        if probe.returncode != 0:
            # BUG FIX: the original formatted an undefined name `ip` here,
            # raising NameError instead of the intended error message.
            raise ValueError(
                "Server: {} cannot be reached. The key was not published"
                .format(key_server_ip))
        request_creator = RequestCreator()
        json_data, destination = request_creator.post_db_key_request(
            {
                'ip': key_server_ip,
                'port': port
            }, {
                'id': public_key[0],
                'pk': public_key[2]
            })
        return (json_data, destination)
    except Exception as error:
        # Best-effort: report and signal failure to the caller.
        print("Unexpected error: {}".format(error))
        return None
def package_message(self, index, db, pir_xor, portEnum,
                    request_type=RequestType.push_to_db.value,
                    mix_subset=5, session_name=None):
    """Build a Sphinx forward message carrying a PIR query toward a database.

    Generates a fresh session key pair, picks random forward/backward mix
    routes, attaches a SURB for the reply path, and returns the serialized
    packet addressed to the first forward mix node.

    :param index: query index; also stored in self.surbDict for the reply.
    :param db: database selector passed to create_db_destination.
    :param pir_xor: PIR payload included in the database message.
    :param portEnum: enum-like object exposing .db.value and .mix.value ports.
    :param request_type: request-type tag placed in the message.
    :param mix_subset: unused here — TODO confirm whether it should bound
        the route length (SecurityParameters.NUMBER_OF_MIXES is used instead).
    :param session_name: forwarded to key generation and DB encryption.
    :return: (json_data, dest) ready to send, or None when no mix nodes
        or no databases are known.
    """
    # Fresh per-session key pair; the private half is kept for the reply.
    self.public_key, self.private_key = self.encryptor.keyGenerate(
        session_name)
    self.session_name = session_name

    # NOTE(review): defined but never called in this method — dead code?
    def json_encode(arguments):
        return json.dumps(dict(arguments))

    def prepare_forward_message(mixnodes_dict, message, dest, key, portEnum):
        # Wrap `message` for `dest` in Sphinx layers over a random forward
        # route, and embed a SURB over an independent backward route.
        params = getGlobalSphinxParams()
        group = params.group.G
        # Independent random routes for the two directions.
        use_nodes_forward = rand_subset(mixnodes_dict.keys(),
                                        SecurityParameters.NUMBER_OF_MIXES)
        use_nodes_backward = rand_subset(
            mixnodes_dict.keys(), SecurityParameters.NUMBER_OF_MIXES)
        nodes_routing_forward = list(map(Nenc, use_nodes_forward))
        nodes_routing_backward = list(map(Nenc, use_nodes_backward))
        # NOTE(review): the comprehension variable `key` shadows the outer
        # `key` parameter (the DB encryption key) — harmless here because
        # the parameter is only used after the comprehensions, but fragile.
        pks_chosen_nodes_forward = [
            EcPt.from_binary(mixnodes_dict[key], group)
            for key in use_nodes_forward
        ]
        pks_chosen_nodes_backward = [
            EcPt.from_binary(mixnodes_dict[key], group)
            for key in use_nodes_backward
        ]
        # SURB lets the database reply without learning our identity.
        surbid, surbkeytuple, nymtuple = create_surb(
            params, nodes_routing_backward, pks_chosen_nodes_backward,
            self.ip)
        self.surbDict[surbid] = {'surbkeytuple': surbkeytuple}
        message['nymtuple'] = nymtuple
        message = encode(message)
        # Encrypt the serialized message for the database.
        json_msg = self.encryptForDB(message, key, self.session_name)
        print(json_msg, len(json_msg))
        header, delta = create_forward_message(params,
                                               nodes_routing_forward,
                                               pks_chosen_nodes_forward,
                                               dest, json_msg)
        # Return first forward hop (entry node) and last backward hop
        # (the node the client must poll for the reply).
        return (header, delta, use_nodes_forward[0], surbid,
                use_nodes_backward[-1])

    # Guard clauses: cannot route without known mixes and databases.
    if len(self.mixnode_list) == 0:
        print("There are no mix-nodes available.")
        return
    if len(self.db_list) == 0:
        print("There are no databases available.")
        return
    db_dest, key = self.create_db_destination(db, portEnum.db.value)
    message = self.create_db_message(
        index, {
            'pir_xor': pir_xor,
            'request_type': request_type,
            'pk': self.public_key
        })
    header, delta, first_mix, surbid, mix_to_poll = prepare_forward_message(
        self.mixnode_list, message, db_dest, key, portEnum)
    # Remember everything needed to decrypt/route the eventual reply.
    self.surbDict[surbid]['index'] = index
    self.surbDict[surbid]['source'] = mix_to_poll
    self.surbDict[surbid]['key'] = self.private_key
    json_data, dest = RequestCreator().post_msg_to_mix(
        {
            'ip': first_mix,
            'port': portEnum.mix.value
        }, {
            'header': header,
            'delta': delta
        })
    # Debug override: send everything to localhost instead of the real node.
    if Debug.dbg is True:
        dest['ip'] = b'0.0.0.0'
    return (json_data, dest)
num_events_list = [ 1000000, 1000000, 500000, 250000, 100000, 100000, 100000, 100000, 100000 ] years = [2017, 2018] proc_card_link = 'https://github.com/cms-sw/genproductions/blob/master/bin/Powheg/production/2017/13TeV/Higgs/WminusHJ_HanythingJ_NNPDF31_13TeV/HWminusJ_HanythingJ_NNPDF31_13TeV_Vhadronic_template.input' # Gridpack path templates for 2016 and 2017/2018 gridpack_path_templates = { '2017/2018': '/cvmfs/cms.cern.ch/phys_generator/gridpacks/2017/13TeV/powheg/V2/HWJ_slc6_amd64_gcc700_CMSSW_10_2_22_HWminusJ_M{__MASS__}/v1/HWJ_slc6_amd64_gcc700_CMSSW_10_2_22_HWminusJ_M{__MASS__}.tgz' } rc = RequestCreator(proc_tag='WminusH_HToInv', mass_points=mass_points, dataset_name_templates=dataset_name_templates, lhe_fragment_template=lhe_fragment_template, pythia_fragment_templates=pythia_fragment_templates, num_events_list=num_events_list, years=years, proc_card_link=proc_card_link, gridpack_path_templates=gridpack_path_templates) # Get the request information and store them into CSV files for each year rc.prepare_requests() rc.write_to_csv() ### Request creator for WplusH requests mass_points = [110, 150, 200, 300, 400, 500, 600, 800, 1000] dataset_name_templates = { 2017: 'WplusH_WToQQ_HToInvisible_M{__MASS__}_TuneCP5_13TeV_powheg_pythia8', 2018: