def _send_superdense(packet):
    """
    Encodes two classical bits into a qubit and sends it as a superdense-coded message.

    Args:
        packet (Packet): The packet in which to transmit.
    """
    sender = packet.sender
    receiver = packet.receiver
    host_sender = network.get_host(sender)

    if not network.shares_epr(sender, receiver):
        Logger.get_instance().log('No shared EPRs - Generating one between '
                                  + sender + " and " + receiver)
        q_id, _ = host_sender.send_epr(receiver, await_ack=True, block=True)
        assert q_id is not None
        q_superdense = host_sender.get_epr(receiver, q_id=q_id, wait=WAIT_TIME)
    else:
        q_superdense = host_sender.get_epr(receiver, wait=5)

    if q_superdense is None:
        Logger.get_instance().log('Failed to get EPR with ' + sender + " and " + receiver)
        raise Exception("couldn't encode superdense")

    _encode_superdense(packet.payload, q_superdense)

    # Change the qubit ID so that, on receipt, sender and receiver don't hold the same ID
    q_superdense.id = "E" + q_superdense.id
    packet.payload = q_superdense
    packet.protocol = REC_SUPERDENSE
    packet.payload_type = QUANTUM
    network.send(packet)
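# Usage sketch (not part of this module): how the superdense-coding path handled
# above is typically driven at the application level. Written against the
# QuNetSim-style Host/Network API these handlers imply; the import paths, host
# IDs and wait values are assumptions for illustration only.
from qunetsim.components import Host, Network

network = Network.get_instance()
network.start(['Alice', 'Bob'])

alice = Host('Alice')
bob = Host('Bob')
alice.add_connection('Bob')
bob.add_connection('Alice')
alice.start()
bob.start()
network.add_host(alice)
network.add_host(bob)

# Two classical bits ('10') get encoded into a single qubit by _send_superdense.
alice.send_superdense('Bob', '10', await_ack=True)
messages = bob.get_classical('Alice', wait=5)
print([m.content for m in messages])

network.stop(stop_hosts=True)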
def _rec_classical(packet):
    """
    Receives a classical message packet, parses out the sequence number and message,
    and sends an ACK back to the sender when one is required.

    Args:
        packet (Packet): The packet in which to receive.

    Returns:
        Message: The received message; for ACK payloads, a Message carrying the ACK
        content and the packet's sequence number.
    """
    message = packet.payload
    if packet.payload.content == ACK:
        message = Message(sender=packet.sender, content=ACK, seq_num=packet.seq_num)
        Logger.get_instance().log(packet.receiver + " received ACK from " + packet.sender
                                  + " with sequence number " + str(packet.seq_num))
    else:
        # Send an ACK if seq_num is not -1, but never ACK an ACK
        if packet.seq_num != -1:
            _send_ack(packet.sender, packet.receiver, packet.seq_num)

    return message
def add_host(self, host):
    """
    Adds the *host* to the ARP table and updates the network graph.

    Args:
        host (Host): The host to be added to the network.
    """
    Logger.get_instance().debug('host added: ' + host.host_id)
    self.ARP[host.host_id] = host
    self._update_network_graph(host)
def _send_ack(sender, receiver, seq_number):
    """
    Sends an acknowledgement from the receiver back to the sender.

    Args:
        sender (str): The sender ID (the host being acknowledged)
        receiver (str): The receiver ID (the host sending the ACK)
        seq_number (int): The sequence number to acknowledge
    """
    Logger.get_instance().log('sending ACK:' + str(seq_number + 1) + ' from ' + receiver
                              + " to " + sender)
    host_receiver = network.get_host(receiver)
    host_receiver.send_ack(sender, seq_number)
def _rec_qubit(packet):
    """
    Receives a packet containing qubit information (the qubit itself is transmitted
    over the quantum connection).

    Args:
        packet (Packet): The packet in which to receive.
    """
    Logger.get_instance().log(packet.receiver + ' received qubit ' + packet.payload.id
                              + ' from ' + packet.sender)
    # Send an ACK if seq_num is not -1
    if packet.seq_num != -1:
        _send_ack(packet.sender, packet.receiver, packet.seq_num)
def _remove_network_node(self, host):
    """
    Removes the host's node from the classical network graph.

    Args:
        host (Host): The host to be removed from the network.
    """
    try:
        self.classical_network.remove_node(host.host_id)
    except nx.NetworkXError:
        Logger.get_instance().error('attempted to remove a non-existing node from the network')
def _relay_message(packet):
    """
    Decreases the TTL of the network packet and, if the TTL has not expired,
    forwards the packet to the next node in the route.

    Args:
        packet (RoutingPacket): Packet to be relayed
    """
    packet.ttl -= 1
    if packet.ttl != 0:
        network.send(packet)
    else:
        Logger.get_instance().log('TTL Expired on packet')
def main_genetique(nom, ratiovoisin=0.5, selec=100, random=False, crois=100, alpha=0.85,
                   duel=True, gene=100, beta=0.15, withplot=False):
    # Genetic-algorithm driver: builds an initial population, then alternates
    # evaluation, selection and crossover until the generation limit, a 900 s
    # time budget or a stagnation threshold (30% of the generations) is reached.
    pop = Population(nom)
    temps1 = time.time()
    temps2, temps_opti = 0, 0
    alert = 0
    meanfit = []
    maxfit = []
    genese(crois, pop, ratio_voisin=ratiovoisin)
    fichier = Logger(pop.instance, "algo_génétique",
                     **{"% de voisins de SPT et LPT": ratiovoisin,
                        "Nombre selection": selec,
                        "Type de selection (random)": random,
                        "Nombre croisement": crois,
                        "% de croisement": alpha,
                        "Type de croisement (duel)": duel,
                        "Nombre génération": gene,
                        "Facteur de mutation": beta,
                        "Seed": pop.hash})

    while pop.generation <= gene and temps2 < 900 and alert < int(gene * 0.3):
        print("Géneration: ", pop.generation, file=open(fichier.location, 'a'))
        evaluation(pop)
        selection(pop, selec, random=random)
        meanfit.append(pop.MeanFit())
        maxfit.append(pop.MaxFit())
        croisement(pop, alpha=alpha, beta=beta, duel=duel, n=crois)
        evaluation(pop)
        if pop.change:
            # A new best individual (elite) was found: reset the stagnation counter
            alert = 0
            print("==========================Nouvel elite !==========================",
                  file=open(fichier.location, 'a'))
            print(pop.elite, file=open(fichier.location, 'a'))
            temps_opti = time.time() - temps1
            pop.change = False
        else:
            alert += 1
        temps2 = time.time() - temps1

    fichier.makespanFile(pop.elite.cout, pop.elite.sequence)
    fichier.fitOverTIme(meanfit, maxfit)
    fichier.tpsFile(temps_opti, "Optimum trouvé au bout de: ")
    fichier.tpsFile(temps2, "Temps total: ")
    if withplot:
        plot_genetique_fitness(meanfit, maxfit, fichier)
    return meanfit, maxfit, temps_opti
def stop(self, stop_hosts=False):
    """
    Stops the network.

    Args:
        stop_hosts (bool): Whether to also stop all hosts in the network.
    """
    Logger.get_instance().log("Network stopped")
    try:
        if stop_hosts:
            for host in self.ARP:
                self.ARP[host].stop(release_qubits=True)

        self._stop_thread = True
        if self._backend is not None:
            self._backend.stop()
    except Exception as e:
        Logger.get_instance().error(e)
def process(packet):
    """
    Decodes the packet and dispatches it to the handler for the protocol in the
    packet header.

    Args:
        packet (Packet): Packet to be processed.

    Returns:
        The return value of the protocol handler.
    """
    protocol = packet.protocol
    if protocol == SEND_TELEPORT:
        return _send_teleport(packet)
    elif protocol == REC_TELEPORT:
        return _rec_teleport(packet)
    elif protocol == SEND_CLASSICAL:
        return _send_classical(packet)
    elif protocol == REC_CLASSICAL:
        return _rec_classical(packet)
    elif protocol == REC_EPR:
        return _rec_epr(packet)
    elif protocol == SEND_EPR:
        return _send_epr(packet)
    elif protocol == SEND_SUPERDENSE:
        return _send_superdense(packet)
    elif protocol == REC_SUPERDENSE:
        return _rec_superdense(packet)
    elif protocol == SEND_QUBIT:
        return _send_qubit(packet)
    elif protocol == REC_QUBIT:
        return _rec_qubit(packet)
    elif protocol == RELAY:
        return _relay_message(packet)
    elif protocol == SEND_KEY:
        return _send_key(packet)
    elif protocol == REC_KEY:
        return _rec_key(packet)
    elif protocol == SEND_GHZ:
        return _send_ghz(packet)
    elif protocol == REC_GHZ:
        return _rec_ghz(packet)
    elif protocol == SEND_BROADCAST:
        return _send_broadcast(packet)
    else:
        Logger.get_instance().error('protocol not defined')
def _route_quantum_info(self, sender, receiver, qubits):
    """
    Routes qubits from sender to receiver.

    Args:
        sender (Host): Sender of the qubits
        receiver (Host): Receiver of the qubits
        qubits (list): The qubits to be sent
    """

    def transfer_qubits(r, store=False, original_sender=None):
        for q in qubits:
            Logger.get_instance().log('transfer qubits - sending qubit ' + q.id)
            x_err_var = random.random()
            z_err_var = random.random()
            # Apply the channel's Pauli errors with the configured probabilities
            if x_err_var > (1 - self.x_error_rate):
                q.X()
            if z_err_var > (1 - self.z_error_rate):
                q.Z()
            q.send_to(self.ARP[r].host_id)
            Logger.get_instance().log('transfer qubits - received ' + q.id)

            # Unblock qubits in case they were blocked
            q.blocked = False

            if not store and self.ARP[r].q_relay_sniffing:
                self.ARP[r].q_relay_sniffing_fn(original_sender, receiver, q)

            if store and original_sender is not None:
                self.ARP[r].add_data_qubit(original_sender, q)

    route = self.get_quantum_route(sender, receiver)
    i = 0
    while i < len(route) - 1:
        Logger.get_instance().log('sending qubits from ' + route[i] + ' to ' + route[i + 1])
        # Only the final hop stores the qubits at the receiver
        if len(route[i:]) != 2:
            transfer_qubits(route[i + 1], original_sender=route[0])
        else:
            transfer_qubits(route[i + 1], store=True, original_sender=route[0])
        i += 1
def routing_algorithm(di_graph, source, target):
    """
    Entanglement-based routing function.

    Note: any custom routing function must have exactly these three parameters
    and must return a list ordered by the steps in the route.

    Args:
        di_graph (networkx DiGraph): The directed graph representation of the network.
        source (str): The sender ID
        target (str): The receiver ID

    Returns:
        (list): The route ordered by the steps in the route.
    """
    # Build a weighted graph where links with more shared EPR pairs are cheaper
    entanglement_network = nx.DiGraph()
    nodes = di_graph.nodes()
    for node in nodes:
        host = network.get_host(node)
        host_connections = host.get_connections()
        for connection in host_connections:
            if connection['type'] == 'quantum':
                num_epr_pairs = len(host.get_epr_pairs(connection['connection']))
                if num_epr_pairs == 0:
                    # Heavily penalise links with no shared entanglement
                    entanglement_network.add_edge(host.host_id, connection['connection'],
                                                  weight=1000)
                else:
                    entanglement_network.add_edge(host.host_id, connection['connection'],
                                                  weight=1. / num_epr_pairs)
    try:
        route = nx.shortest_path(entanglement_network, source, target, weight='weight')
        return route
    except Exception as e:
        Logger.get_instance().error(e)
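# Usage sketch (assumption): QuNetSim's Network exposes a settable
# quantum_routing_algo hook for custom route computation; if that holds, the
# entanglement-based function above can be registered like this.
from qunetsim.components import Network

network = Network.get_instance()
# Prefer routes along links with the most shared EPR pairs.
network.quantum_routing_algo = routing_algorithm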
def checksum_sender(host, q_size, receiver_id, checksum_size_per_qubit):
    # Encode a random classical bit string into qubits and append checksum qubits
    bit_arr = np.random.randint(2, size=q_size)
    Logger.get_instance().log('Bit array to be sent: ' + str(bit_arr))
    qubits = []
    for i in range(q_size):
        q_tmp = Qubit(host)
        if bit_arr[i] == 1:
            q_tmp.X()
        qubits.append(q_tmp)

    check_qubits = host.add_checksum(qubits, checksum_size_per_qubit)
    checksum_size = int(q_size / checksum_size_per_qubit)
    qubits.append(check_qubits)
    checksum_cnt = 0
    # Send the data qubits first, then the checksum qubits
    for i in range(q_size + checksum_size):
        if i < q_size:
            q = qubits[i]
        else:
            q = qubits[q_size][checksum_cnt]
            checksum_cnt = checksum_cnt + 1
        host.send_qubit(receiver_id, q, await_ack=True)
def checksum_receiver(host, q_size, sender_id, checksum_size_per_qubit):
    # Receive the data and checksum qubits, verify the checksum, then measure the data
    qubits = []
    checksum_size = int(q_size / checksum_size_per_qubit)
    while len(qubits) < (q_size + checksum_size):
        q = host.get_data_qubit(sender_id, wait=WAIT_TIME)
        qubits.append(q)
        Logger.get_instance().log(str(host.host_id) + ': received qubit')

    # Separate the checksum qubits from the data qubits
    checksum_qubits = []
    checksum_cnt = 0
    for i in range(len(qubits)):
        if checksum_cnt < checksum_size:
            checksum_qubits.append(qubits[q_size + i]['q'])
            checksum_cnt = checksum_cnt + 1

    # Apply CNOTs from each data qubit onto its checksum qubit
    checksum_cnt = 1
    for i in range(len(qubits) - checksum_size):
        qubits[i]['q'].cnot(checksum_qubits[checksum_cnt - 1])
        if i == (checksum_cnt * checksum_size_per_qubit - 1):
            checksum_cnt = checksum_cnt + 1

    # If no errors occurred, every checksum qubit measures 0
    errors = 0
    for i in range(len(checksum_qubits)):
        if checksum_qubits[i].measure() != 0:
            errors += 1

    print('---------')
    if errors == 0:
        Logger.get_instance().log('No errors exist in the UDP packet')
    else:
        Logger.get_instance().log('There were errors in the UDP transmission')
    print('---------')

    rec_bits = []
    for i in range(len(qubits) - checksum_size):
        rec_bits.append(qubits[i]['q'].measure())

    if errors == 0:
        print('---------')
        Logger.get_instance().log('Receiver received the classical bits: ' + str(rec_bits))
        print('---------')
        return True
    return
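# Usage sketch (illustrative only): wiring the two halves of the checksum
# protocol together with a QuNetSim-style Host.run_protocol helper, which
# passes the host as the first argument to the protocol function. The host
# objects, q_size and checksum ratio below are hypothetical values.
q_size = 4
checksum_size_per_qubit = 2

alice.run_protocol(checksum_sender, (q_size, bob.host_id, checksum_size_per_qubit))
bob.run_protocol(checksum_receiver, (q_size, alice.host_id, checksum_size_per_qubit),
                 blocking=True)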
def _process_queue(self):
    """
    Runs a thread for processing the packets in the packet queue.
    """
    while True:
        if self._stop_thread:
            break

        if not self._packet_queue.empty():
            # Artificially delay the network
            if self.delay > 0:
                time.sleep(self.delay)

            packet = self._packet_queue.get()

            # Simulate packet loss
            packet_drop_var = random.random()
            if packet_drop_var > (1 - self.packet_drop_rate):
                Logger.get_instance().log("PACKET DROPPED")
                if packet.payload_type == protocols.QUANTUM:
                    packet.payload.release()
                continue

            sender, receiver = packet.sender, packet.receiver

            if packet.payload_type == protocols.QUANTUM:
                self._route_quantum_info(sender, receiver, [packet.payload])

            try:
                if packet.protocol == protocols.RELAY and not self.use_hop_by_hop:
                    full_route = packet.route
                    route = full_route[full_route.index(sender):]
                else:
                    route = self.get_classical_route(sender, receiver)

                if len(route) < 2:
                    raise Exception('No route exists')

                elif len(route) == 2:
                    if packet.protocol != protocols.RELAY:
                        if packet.protocol == protocols.REC_EPR:
                            host_sender = self.get_host(sender)
                            q = host_sender \
                                .backend \
                                .create_EPR(host_sender.host_id,
                                            receiver,
                                            q_id=packet.payload['q_id'],
                                            block=packet.payload['blocked'])
                            host_sender.add_epr(receiver, q)
                        self.ARP[receiver].rec_packet(packet)
                    else:
                        self.ARP[receiver].rec_packet(packet.payload)

                else:
                    if packet.protocol == protocols.REC_EPR:
                        q_id = packet.payload['q_id']
                        blocked = packet.payload['blocked']
                        q_route = self.get_quantum_route(sender, receiver)
                        if self.use_ent_swap:
                            DaemonThread(self._entanglement_swap,
                                         args=(sender, receiver, q_route, q_id,
                                               packet.seq_num, blocked))
                        else:
                            DaemonThread(self._establish_epr,
                                         args=(sender, receiver, q_id,
                                               packet.seq_num, blocked))
                    else:
                        network_packet = self._encode(route, packet)
                        self.ARP[route[1]].rec_packet(network_packet)

            except nx.NodeNotFound:
                Logger.get_instance().error("route couldn't be calculated, node doesn't exist")
            except ValueError:
                Logger.get_instance().error("route couldn't be calculated, value error")
            except Exception as e:
                Logger.get_instance().error('Error in network: ' + str(e))
def _entanglement_swap(self, sender, receiver, route, q_id, o_seq_num, blocked):
    """
    Performs a chain of entanglement swaps with the hosts between sender and
    receiver to create a shared EPR pair between sender and receiver.

    Args:
        sender (Host): Sender of the EPR pair
        receiver (Host): Receiver of the EPR pair
        route (list): Route between the sender and receiver
        q_id (str): Qubit ID of the sent EPR pair
        o_seq_num (int): The original sequence number
        blocked (bool): If the pair being distributed is blocked or not
    """
    host_sender = self.get_host(sender)

    def establish_epr(net, s, r):
        if not net.shares_epr(s, r):
            self.get_host(s).send_epr(r, q_id, await_ack=True)
        else:
            old_id = self.get_host(s).change_epr_qubit_id(r, q_id)
            net.get_host(r).change_epr_qubit_id(route[i], q_id, old_id)

    # Create EPR pairs on the route, where all EPR qubits have the id q_id
    threads = []
    for i in range(len(route) - 1):
        threads.append(DaemonThread(establish_epr, args=(self, route[i], route[i + 1])))

    for t in threads:
        t.join()

    # Teleport each intermediate node's EPR half along the route to extend the pair
    for i in range(len(route) - 2):
        host = self.get_host(route[i + 1])
        q = host.get_epr(route[0], q_id, wait=10)
        if q is None:
            print("Host is %s" % host.host_id)
            print("Search host is %s" % route[0])
            print("Search id is %s" % q_id)
            print("EPR storage is")
            print(host.EPR_store)
            Logger.get_instance().error('Entanglement swap failed')
            return
        data = {'q': q,
                'eq_id': q_id,
                'node': sender,
                'o_seq_num': o_seq_num,
                'type': protocols.EPR}
        if route[i + 2] == route[-1]:
            data = {'q': q,
                    'eq_id': q_id,
                    'node': sender,
                    'ack': True,
                    'o_seq_num': o_seq_num,
                    'type': protocols.EPR}
        host.send_teleport(route[i + 2], None, await_ack=True, payload=data,
                           generate_epr_if_none=False)

    # Update the storage so that the EPR qubit is registered as shared with the receiver
    q2 = host_sender.get_epr(route[1], q_id=q_id)
    host_sender.add_epr(receiver, q2, q_id, blocked)
    Logger.get_instance().log('Entanglement swap was successful for pair with id ' + q_id
                              + ' between ' + sender + ' and ' + receiver)
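# Usage sketch (assumption): with the use_ent_swap flag read in _process_queue
# enabled, an application-level EPR request between non-adjacent hosts is
# served by the chained swaps above. Host IDs here are illustrative.
network = Network.get_instance()
network.use_ent_swap = True

# Alice and Charlie are connected only through Bob, so the request below
# triggers _entanglement_swap along the route Alice - Bob - Charlie.
q_id, _ = alice.send_epr('Charlie', await_ack=True)
q_alice = alice.get_epr('Charlie', q_id=q_id)
q_charlie = charlie.get_epr('Alice', q_id=q_id)
print(q_alice.measure(), q_charlie.measure())  # correlated outcomes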
def main(arg_list=None):
    args = vars(parse_args(arg_list))
    Logger.show_log_levels += args["verbose"] - args["quiet"]
    Logger.debug("Log level: {}".format(Logger.show_log_levels))
    Logger.debug("Arguments: {}".format(args))

    auto_functions = enumerate_auto_files(args["input_dir"])
    Logger.info("Found auto functions: {}".format([func.get_name() for func in auto_functions]))

    output_path = args["output_dir"]
    if output_path[-1] != "/":
        output_path += "/"
    cpp_file = open(output_path + "auto_functions.cpp", "w")
    h_file = open(output_path + "auto_functions.h", "w")

    compiled_auto_functions = generate_auto_functions(auto_functions)

    if args["format"]:
        Logger.info("Formatting output files")
        compiled_auto_functions = [Formatter(func).get_formatted_text()
                                   for func in compiled_auto_functions]

    Logger.debug("Writing output files")
    h_file.write(compiled_auto_functions[0])
    cpp_file.write(compiled_auto_functions[1])
    cpp_file.close()
    h_file.close()
    Logger.info("Done :)")
def main(arg_list=None):
    args = vars(parse_args(arg_list))
    Logger.show_log_levels += args["verbose"] - args["quiet"]
    Logger.debug("Log level: {}".format(Logger.show_log_levels))
    Logger.debug("Arguments: {}".format(args))

    auto_functions = [Function(open(function_file).read(), get_script_dir())
                      for function_file in args["input_files"]]
    Logger.info("Found auto functions: {}".format([func.get_name() for func in auto_functions]))

    cpp_file = open(args["output_cpp"], "w")
    h_file = open(args["output_header"], "w")

    compiled_auto_functions = generate_auto_functions(auto_functions, args["output_header"])

    if args["format"]:
        Logger.info("Formatting output files")
        compiled_auto_functions = [Formatter(func).get_formatted_text()
                                   for func in compiled_auto_functions]

    Logger.debug("Writing output files")
    h_file.write(compiled_auto_functions[0])
    cpp_file.write(compiled_auto_functions[1])
    cpp_file.close()
    h_file.close()
    Logger.info("Done :)")
#!/usr/bin/python
import os
import sys

parent_dir = os.path.dirname(os.path.realpath(__file__)) + "/../../../"
sys.path.append(parent_dir)

from objects.logger import Logger

if len(sys.argv) > 3:
    Logger.show_log_levels = int(sys.argv[3])

if sys.argv[1] == "error":
    Logger.error(sys.argv[2])
    sys.exit(0)
if sys.argv[1] == "warn":
    Logger.warn(sys.argv[2])
    sys.exit(0)
if sys.argv[1] == "info":
    Logger.info(sys.argv[2])
    sys.exit(0)
if sys.argv[1] == "debug":
    Logger.debug(sys.argv[2])
    sys.exit(0)
import re
import asyncio

from dateutil.relativedelta import relativedelta
from datetime import datetime

import discord

from objects.bot import KiwiBot
from objects.logger import Logger

from constants import (ID_REGEX, USER_MENTION_OR_ID_REGEX, ROLE_OR_ID_REGEX,
                       CHANNEL_OR_ID_REGEX, COLOUR_REGEX, TIME_REGEX)

bot = KiwiBot.get_bot()
logger = Logger.get_logger()


async def create_subprocess_exec(*args, stdout=asyncio.subprocess.PIPE,
                                 stderr=asyncio.subprocess.PIPE):
    process = await asyncio.create_subprocess_exec(*args, stdout=stdout, stderr=stderr)
    return process, process.pid


async def create_subprocess_shell(command, stdout=asyncio.subprocess.PIPE,
                                  stderr=asyncio.subprocess.PIPE):
    process = await asyncio.create_subprocess_shell(command, stdout=stdout, stderr=stderr)
    return process, process.pid
def test_log_non_strings(self):
    Logger.error(1)
    Logger.error(-1)
    Logger.error(0.3)
    Logger.error(["a", "b", "c"])
    Logger.error([1, 2, 3])
    Logger.error({"test": 1, "asdf": "arst"})
    Logger.error(Logger)
    Logger.error(Logger.error)
from objects.logger import Logger

logger = Logger()

from aiohttp import ClientSession

import traceback
import asyncio
import time
import sys

import discord

from objects.modulemanager import ModuleManager
from objects.config import Config
from objects.redisdb import RedisDB
from objects.context import Context

from constants import *

from utils import formatters


class KiwiBot(discord.AutoShardedClient):

    _bot = None

    def __init__(self, **kwargs):
        KiwiBot._bot = self
        super().__init__(status='idle', **kwargs)
def _send_teleport(packet):
    """
    Performs the sender-side measurements for teleportation of a qubit and sends
    the measurement results to the other host.

    Args:
        packet (Packet): The packet in which to transmit.
    """
    if 'node' in packet.payload:
        node = packet.payload['node']
    else:
        node = packet.sender

    if 'type' in packet.payload:
        q_type = packet.payload['type']
    else:
        q_type = DATA

    q = packet.payload['q']
    host_sender = network.get_host(packet.sender)

    if GENERATE_EPR_IF_NONE in packet.payload and packet.payload[GENERATE_EPR_IF_NONE]:
        if not network.shares_epr(packet.sender, packet.receiver):
            Logger.get_instance().log('No shared EPRs - Generating one between '
                                      + packet.sender + " and " + packet.receiver)
            host_sender.send_epr(packet.receiver, q_id=q.id, await_ack=True, block=True)

    if 'eq_id' in packet.payload:
        epr_teleport = host_sender.get_epr(packet.receiver, packet.payload['eq_id'],
                                           wait=WAIT_TIME)
    else:
        epr_teleport = host_sender.get_epr(packet.receiver, wait=WAIT_TIME)

    assert epr_teleport is not None
    # Bell-state measurement on the data qubit and the local half of the EPR pair
    q.cnot(epr_teleport)
    q.H()
    m1 = q.measure()
    m2 = epr_teleport.measure()

    data = {'measurements': [m1, m2], 'type': q_type, 'node': node}
    if q_type == EPR:
        data['q_id'] = packet.payload['eq_id']
        data['eq_id'] = packet.payload['eq_id']
    else:
        data['q_id'] = q.id
        data['eq_id'] = epr_teleport.id

    if 'o_seq_num' in packet.payload:
        data['o_seq_num'] = packet.payload['o_seq_num']
    if 'ack' in packet.payload:
        data['ack'] = packet.payload['ack']

    packet.payload = data
    packet.protocol = REC_TELEPORT
    network.send(packet)
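# Usage sketch (illustrative): the application-level teleport flow that ends up
# in the handler above, using the same Host calls seen elsewhere in this section
# (send_teleport, get_data_qubit). Host IDs are illustrative and the Qubit
# import path is an assumption.
from qunetsim.objects import Qubit

q = Qubit(alice)
q.X()  # prepare |1> as the payload to teleport

alice.send_teleport('Bob', q, await_ack=True)

# Bob receives the reconstructed qubit as a data qubit from Alice.
q_bob = bob.get_data_qubit('Alice', wait=10)
print(q_bob.measure())  # expected: 1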
def exploration_voisinage(solution, n=1, max_depth=6, crit_stagnation=50):
    """
    Explores the neighbourhood of a solution.

    :param solution: Solution
    :param n: int
    :param max_depth: int
    :param crit_stagnation: double
    :return: DiGraph
    """
    graph = nx.MultiDiGraph()
    temps1 = time.time()
    ecart = 10
    Fichier = Logger(solution.instance, "exploration_voisinage",
                     **{"n": n,
                        "max_depth": max_depth,
                        "crit_stagnation": crit_stagnation,
                        "Séquence de départ": solution.sequence})
    to_be_explored = []
    solution.voisinage()
    opti = solution.makeSpan
    to_be_explored.append(solution)
    graph.add_node(solution.nom, makespan=solution.makeSpan)
    depth, alert, compt = 0, 0, 0
    explored = []
    best = solution.sequence

    while to_be_explored:
        if depth < max_depth:
            if alert == crit_stagnation:
                Fichier.addLine("Stagnation du makespan")
                break
            else:
                depth = to_be_explored[0].depth
                to_be_explored, explored, make_min, seq = explore_deeper(
                    depth, to_be_explored, explored, graph, n, Fichier)
                if make_min < opti:
                    # New best makespan found: reset the stagnation counter
                    alert = 0
                    opti = make_min
                    best = seq
                else:
                    alert += 1
                compt += 1
        else:
            Fichier.addLine("Profondeur atteinte")
            break

    temps = time.time() - temps1
    Fichier.makespanFile(opti, best)
    Fichier.itFile(compt)
    Fichier.tpsFile(temps)
    return graph