def draw_edges_for_as(ISDs, ISD, AS, ISDs_done, node_labels, scion_graph):
    """
    Draw all inter-ISD edges originating from one AS into scion_graph.

    :param dict ISDs: topology info for all ISDs, keyed by ISD id.
    :param ISD: ISD id of the AS whose edges we draw.
    :param AS: AS id whose "inter_n" interfaces are walked.
    :param ISDs_done: collection of ISD ids whose edges were already drawn
        (used to avoid drawing each inter-ISD edge twice).
    :param bool node_labels: if True, colored interface labels are attached
        to each edge.
    :param scion_graph: graphviz graph the edges are added to.
    """
    AS_list = ISDs[ISD]["AS"]
    ia = ISD_AS.from_values(ISD, AS)
    id = ia.__str__()
    for interface in AS_list[AS]["inter_n"]:
        neighborISD = AS_list[AS]["inter_n"][interface]["n_isd"]
        neighborAS = AS_list[AS]["inter_n"][interface]["n_as"]
        # check if neighbor ISD was already handled
        if AS_list[AS]["inter_n"][interface]["n_isd"] not in ISDs_done:
            # check if neighbor exists (in case it is referenced but folder
            # does not exist)
            if neighborISD not in ISDs:
                continue
            if neighborAS not in ISDs[neighborISD]["AS"]:
                continue
            n_ia = ISD_AS.from_values(neighborISD, neighborAS)
            n_id = n_ia.__str__()
            if node_labels:
                # One distinct color per edge so head/tail labels can be
                # matched to their edge visually.
                color = get_color()
                # remote is presumably (remote_ifid, remote_port) of the peer
                # border router — TODO confirm against get_remote_interface.
                remote = get_remote_interface(ISDs[neighborISD]["AS"][neighborAS],
                                              AS_list[AS]["inter_n"][interface]["br-ip"],
                                              AS_list[AS]["inter_n"][interface]["br-port"])
                # HTML-like graphviz labels: <<font ...>> syntax.
                headlabel = '<<font color="' + color + '">' + str(
                    remote[0]) + ": " + str(remote[1]) + '</font>>'
                taillabel = '<<font color="' + color + '">' + str(interface) + ': ' + \
                    str(AS_list[AS]["inter_n"][interface]["br-port"]) + '</font>>'
                # constraint=false: edge does not influence graphviz ranking.
                scion_graph.edge(id, n_id, color=color, _attributes={
                    'constraint': 'false', 'headlabel': headlabel,
                    'taillabel': taillabel})
            else:
                scion_graph.edge(id, n_id, _attributes={'constraint': 'false'})
def draw_edges_from_current(self, current_neighbors, graph, edge_labels):
    """
    Adds edges for all ASes in current_neighbors
    :param: self,
    array of ASes: list of ASes whose edges we draw,
    graphviz graph: graph to which we add the edges,
    boolean edge_labels: boolean indicating if we add labels to the edges
    :returns: list of ASes to process in the next BFS rotation.
    """
    next_neighbors = []
    for AS in current_neighbors:
        # Mark this AS as handled so its edges are not drawn again.
        self.ASes_done.append(AS)
        ia = ISD_AS.from_values(self.ISD, AS)
        id = ia.__str__()
        for neighbor in self.AS_list[AS]["intra_n"]:
            n_dict = self.AS_list[AS]["intra_n"][neighbor]
            # check if neighbor exists
            if neighbor not in self.AS_list:
                continue
            # check if we have not drawn edges for the neighbor
            if neighbor not in self.ASes_done:
                n_ia = ISD_AS.from_values(self.ISD, neighbor)
                n_id = n_ia.__str__()
                if edge_labels:
                    # Label each edge end with the border-router id on that
                    # side of the link.
                    headlabel = str(self.AS_list[neighbor]["intra_n"][AS]["br-id"])
                    taillabel = str(n_dict["br-id"])
                    graph.edge(id, n_id, _attributes={'headlabel': headlabel,
                                                      'taillabel': taillabel})
                else:
                    graph.edge(id, n_id)
                # add node to next rotation if not in current/next
                if neighbor not in current_neighbors:
                    if neighbor not in next_neighbors:
                        next_neighbors.append(neighbor)
    return next_neighbors
def _find_core_segs(self, src_isd, dst_isd, as_pairs):
    """
    Given a set of AS pairs across 2 ISDs, return the core segments
    connecting those pairs
    """
    found = []
    for src_as, dst_as in as_pairs:
        first = ISD_AS.from_values(src_isd, src_as)
        second = ISD_AS.from_values(dst_isd, dst_as)
        # A pair pointing at itself has no connecting segment.
        if first == second:
            continue
        matches = self.core_segments(first_ia=second, last_ia=first)
        if matches:
            found.extend(matches)
    return found
def request_missing_trcs(self, seg_meta):
    """
    For all missing TRCs which are missing to verify this pcb/path segment,
    request them. Request is sent to certificate server, if the
    pcb/path segment was received by zk. Otherwise the sender of this
    pcb/path segment is asked.
    """
    missing_trcs = set()
    # Snapshot under the lock so we iterate a stable copy.
    with seg_meta.miss_trc_lock:
        missing_trcs = seg_meta.missing_trcs.copy()
    if not missing_trcs:
        return
    for isd, ver in missing_trcs:
        with self.req_trcs_lock:
            # Skip (and do not re-request) TRCs already in flight.
            if (isd, ver) in self.requested_trcs:
                continue
            self.requested_trcs.add((isd, ver))
        # AS part 0: the request targets the ISD's TRC, not a specific AS.
        isd_as = ISD_AS.from_values(isd, 0)
        trc_req = TRCRequest.from_values(isd_as, ver)
        logging.info("Requesting %sv%s TRC", isd, ver)
        if not seg_meta.meta:
            # Segment came via zk — ask a certificate server instead.
            # NOTE(review): if no CS is found the (isd, ver) entry stays in
            # requested_trcs without a request ever being sent — confirm a
            # timeout/cleanup exists elsewhere.
            meta = self.get_cs()
            if meta:
                self.send_meta(trc_req, meta)
        else:
            self.send_meta(trc_req, seg_meta.meta)
def generate_prometheus_config(tp, local_gen_path, as_path):
    """
    Writes Prometheus configuration files for the given AS. Currently only
    generates for border routers.
    :param dict tp: the topology of the AS provided as a dict of dicts.
    :param str local_gen_path: The gen path of scion-web.
    :param str as_path: The path of the given AS.
    """
    # Scrape endpoints for every border router of this AS.
    router_list = ["%s:%s" % (br['Addr'], br['Port'])
                   for br in tp['BorderRouters'].values()]
    targets_path = os.path.join(as_path, PrometheusGenerator.PROM_DIR,
                                PrometheusGenerator.BR_TARGET_FILE)
    target_config = [{'targets': router_list}]
    write_file(targets_path,
               yaml.dump(target_config, default_flow_style=False))
    write_prometheus_config_file(as_path, [targets_path])
    # Create the config for the top level gen directory as well.
    file_paths = []
    for as_obj in AD.objects.all():
        ia = ISD_AS.from_values(as_obj.isd_id, as_obj.as_id)
        file_paths.append(os.path.join(
            get_elem_dir(local_gen_path, ia, ""),
            PrometheusGenerator.PROM_DIR,
            PrometheusGenerator.BR_TARGET_FILE))
    write_prometheus_config_file(local_gen_path, file_paths)
def save_all_topologies(request):
    """
    Generate topology files for all ASes or specific ASes in a ISD.
    :param HttpRequest request: Django HTTP request passed on through urls.py
    :returns: Django HTTP Response object.
    :rtype: HttpResponse.
    """
    # Redirect back to wherever the form was submitted from.
    current_page = request.META.get('HTTP_REFERER')
    topology_params = request.POST.copy()
    isd_list = topology_params.getlist('ISD')
    for isd in isd_list:
        for ad_obj in AD.objects.filter(isd_id=isd):
            isd_as_obj = ISD_AS.from_values(ad_obj.isd_id, ad_obj.as_id)
            isd_as = str(isd_as_obj)
            topo_dict = ad_obj.original_topology
            # write the topology file
            create_local_gen(isd_as, topo_dict)
            # Collect cloud-machine info for this AS and inject it into the
            # (mutable copy of the) POST params for the hostfile generator.
            addr_list = []
            cloud_engine_list = []
            host_name_list = []
            for cloud in CloudMachine.objects.filter(ad_id=ad_obj):
                addr_list.append(cloud.addr)
                cloud_engine_list.append(cloud.cloud_provider)
                host_name_list.append(cloud.host_name)
            topology_params.setlist('inputCloudAddress', addr_list)
            topology_params.setlist('inputCloudEngine', cloud_engine_list)
            topology_params.setlist('inputHostname', host_name_list)
            commit_hash = ad_obj.commit_hash
            # sanitize commit hash from comments, take first part up to |,
            # strip spaces
            commit_hash = (commit_hash.split('|'))[0].strip()
            generate_ansible_hostfile(topology_params, topo_dict, isd_as,
                                      commit_hash)
    return redirect(current_page)
def _request_missing_trcs(self, seg_meta):
    """
    For all missing TRCs which are missing to verify this pcb/path segment,
    request them. Request is sent to certificate server, if the
    pcb/path segment was received by zk. Otherwise the sender of this
    pcb/path segment is asked.
    """
    missing_trcs = set()
    # Snapshot under the lock so iteration works on a stable copy.
    with seg_meta.miss_trc_lock:
        missing_trcs = seg_meta.missing_trcs.copy()
    if not missing_trcs:
        return
    for isd, ver in missing_trcs:
        with self.req_trcs_lock:
            # Already requested — skip.
            if (isd, ver) in self.requested_trcs:
                continue
        # NOTE(review): the lock is released between this check and the
        # insert below, so two threads could both pass the check and send
        # duplicate requests — confirm this is acceptable (benign dup).
        # AS part 0: the request targets the ISD's TRC, not a specific AS.
        isd_as = ISD_AS.from_values(isd, 0)
        trc_req = TRCRequest.from_values(isd_as, ver, cache_only=True)
        # Prefer the segment's sender, fall back to a certificate server.
        meta = seg_meta.meta or self._get_cs()
        if not meta:
            logging.error("Couldn't find a CS to request TRC for PCB %s",
                          seg_meta.seg.short_id())
            continue
        logging.info("Requesting %sv%s TRC from %s, for PCB %s",
                     isd, ver, meta, seg_meta.seg.short_id())
        with self.req_trcs_lock:
            # Record timestamp and destination so timeouts can re-request.
            self.requested_trcs[(isd, ver)] = (time.time(), meta)
        self.send_meta(trc_req, meta)
def prep_approved_join_reply(request, join_rep_dict, own_isdas, own_as_obj):
    """
    Prepares the join reply for the APPROVED case.

    Creates a certificate for the joining AS, signed by this AS, and fills
    join_rep_dict (mutated in place) with the certificates and TRC the
    joining party needs.

    :param HttpRequest request: POST carries newASId, join_as_a_core and the
        joining AS's base64-encoded public keys.
    :param dict join_rep_dict: reply dict populated by this function.
    :param own_isdas: this AS's ISD_AS; index 0 is the ISD id.
    :param own_as_obj: DB object holding this AS's keys, certificate and TRC.
    """
    logger.info("New AS ID = %s", request.POST['newASId'])
    joining_as = request.POST['newASId']
    is_core = request.POST['join_as_a_core']
    sig_pub_key = from_b64(request.POST['sig_pub_key'])
    enc_pub_key = from_b64(request.POST['enc_pub_key'])
    signing_as_sig_priv_key = from_b64(own_as_obj.sig_priv_key)
    # Joining AS lives in our own ISD.
    joining_ia = ISD_AS.from_values(own_isdas[0], joining_as)
    # Core ASes get a longer certificate validity period.
    if is_core.lower() == "true":
        validity = Certificate.CORE_AS_VALIDITY_PERIOD
        comment = "Core AS Certificate"
    else:
        validity = Certificate.AS_VALIDITY_PERIOD
        comment = "AS Certificate"
    cert = Certificate.from_values(
        str(joining_ia), str(own_isdas), INITIAL_TRC_VERSION,
        INITIAL_CERT_VERSION, comment, is_core, validity, enc_pub_key,
        sig_pub_key, SigningKey(signing_as_sig_priv_key)
    )
    respond_ia_chain = CertificateChain.from_raw(own_as_obj.certificate)
    # Chain the new leaf cert with our core AS cert.
    request_ia_chain = CertificateChain([cert, respond_ia_chain.core_as_cert])
    join_rep_dict['JoiningIA'] = str(joining_ia)
    join_rep_dict['IsCore'] = is_core.lower() == "true"
    join_rep_dict['RespondIA'] = str(own_isdas)
    join_rep_dict['JoiningIACertificate'] = request_ia_chain.to_json()
    join_rep_dict['RespondIACertificate'] = respond_ia_chain.to_json()
    join_rep_dict['TRC'] = TRC.from_raw(own_as_obj.trc).to_json()
    logger.debug("Accepting Join Request = %s", join_rep_dict)
def _send_trc_request(self, isd, ver, as_):
    """
    Request version `ver` of the TRC of `isd` from the certificate service
    of `as_`, if a path to it can be found.
    """
    target_ia = ISD_AS.from_values(isd, as_)
    request = TRCRequest.from_values(target_ia, ver, cache_only=True)
    path_meta = self._get_path_via_api(target_ia)
    # Without a path we cannot reach the remote CS — log and bail out.
    if not path_meta:
        logging.warning("TRC request not sent for %s: no path found.",
                        request.short_desc())
        return
    meta = self._build_meta(target_ia, host=SVCType.CS_A,
                            path=path_meta.fwd_path())
    self.send_meta(request, meta)
    logging.info("TRC request sent to %s via [%s]: %s",
                 meta, path_meta.short_desc(), request.short_desc())
def _fetch_trc(self, key, info):
    """
    Send a TRC request for key=(isd, version); the target AS id comes from
    info[2]. Logs a warning when no next hop is available.
    """
    isd, ver = key
    dst_ia = ISD_AS.from_values(isd, info[2])
    payload = TRCRequest.from_values(dst_ia, ver)
    pkt = self._build_packet(SVCType.CS_A, payload=payload)
    next_hop, port = self._get_next_hop(dst_ia, True, False, True)
    if not next_hop:
        logging.warning("TRC request not sent for %sv%s: "
                        "no destination found.", *key)
        return
    self.send(pkt, next_hop, port)
    logging.info("TRC request sent for %sv%s.", *key)
def heartbeat():
    """
    The main function that updates the topology configurations

    Polls the SCION-COORD server; depending on the response content-type it
    either applies per-connection topology changes (JSON) or installs a
    received gen folder (gzip), restarting SCION when anything changed.
    """
    is_modified = False
    ia_list = utils._get_my_asid()
    resp, err = request_server(ia_list)
    if err:
        logging.error("Failed to connect to SCION-COORD server: \n%s" % err)
        exit(1)
    elif resp.headers['content-type'] == 'application/json; charset=utf-8':
        # Coordinator answered with per-IA connection updates.
        resp_dict = json.loads(resp.content.decode('utf8').replace("'", '"'))
        ia_list = resp_dict["IAList"]
        new_br_list = []
        for ia in ia_list:
            connection_dict = ia["Connections"]
            _isd = ia["ISD"]
            _as = ia["AS"]
            # Rebind `ia` from the response dict to an ISD_AS object.
            ia = ISD_AS.from_values(_isd, _as)
            as_obj, original_topo = utils.load_topology(ia)
            topo = original_topo
            logging.info("Received answer from Heartbeat function : \n%s" % resp_dict)
            # check for new neighbors
            for connection in connection_dict:
                if connection["Status"] == CREATE:
                    is_modified = True
                    topo = utils._add_br(connection, topo)
                    new_br_list.append(utils._get_br_id(connection,topo)[0])
                elif connection["Status"] == UPDATE:
                    is_modified = True
                    topo = utils._update_br(connection, topo)
                elif connection["Status"] == REMOVE:
                    is_modified = True
                    topo = utils._remove_br(connection, topo)
            # NOTE(review): is_modified is never reset inside the loop, so a
            # change in one IA makes every subsequent IA regenerate and
            # restart as well — confirm whether that is intended.
            if not is_modified:
                # no change
                logging.info("Nothing changed not Restarting SCION")
            else:
                utils.generate_local_gen(ia, as_obj, topo)
                logging.info("[INFO] Restarting SCION")
                utils.restart_scion()
    # In case we receive the gen folder from the coordinator
    elif resp.headers['content-type'] == 'application/gzip':
        logging.info("[INFO] Received gen folder ")
        utils.parse_response(resp)
        logging.info("[INFO] Starting SCION !")
        utils.restart_scion()
    else:
        # Received something else
        # TODO UPDATE BOX ?
        pass
def draw_node_without_attributes(self, AS, core, graph):
    """
    Adds a node without any attributes to the graph.
    :param: self, string AS: AS ID, boolean core: indicates if the AS is core
    graphviz graph: graph to which we add the AS
    """
    ia = ISD_AS.from_values(self.ISD, AS)
    node_id = ia.__str__()
    # Display label: "<isd-as><name>", optionally tagged as core.
    label = node_id + self.AS_list[AS]["name"]
    if core:
        label = label + " (core)"
    graph.node(node_id, label, _attributes={'shape': 'box'})
def _mk_if_info(self, if_id):
    """
    Small helper method to make it easier to deal with ingress/egress
    interface being 0 while building ASMarkings.
    """
    info = {"remote_ia": ISD_AS.from_values(0, 0), "remote_if": 0, "mtu": 0}
    # Interface id 0 means "no interface" — return the zeroed defaults.
    if not if_id:
        return info
    intf = self.ifid2br[if_id].interfaces[if_id]
    info["remote_ia"] = intf.isd_as
    info["remote_if"] = intf.to_if_id
    info["mtu"] = intf.mtu
    return info
def __init__(self, AS, AS_number, ISD):
    """
    Collect info about this AS's services and border routers.

    :param AS: AS identifier (display form).
    :param AS_number: numeric AS id used to build the ISD_AS.
    :param ISD: ISD id of the AS.
    """
    # ex: info_dict[br] = ip address of br
    # ex: reverse_info_dict[1.3.3.3] = ['zk', 'bs']
    self.info_dict = {}
    self.rev_info_dict = {}
    self.AS = AS
    self.AS_n = AS_number
    self.ISD = ISD
    self.IA = ISD_AS.from_values(self.ISD, self.AS_n)
    # Populate the dicts from the local topology.
    self.gather_non_br_info()
    self.gather_intra_br_info()
    self.gather_inter_br_info()
    # NOTE(review): no parentheses — this stores the bound method itself
    # unless assemble_string is a @property. If it is a plain method, this
    # is likely a bug (should be self.assemble_string()). Confirm against
    # the class definition.
    self.info_string = self.assemble_string
def draw_edges_for_as(ISDs, ISD, AS, ISDs_done, edge_labels, scion_graph):
    """
    Draws all the inter ISD edges for a list of ASes.
    :param: string ISD: ISD of the AS,
    string AS: AS number of the AS,
    array AS_list: contains information of the AS,
    graphviz scion_graph: graph of the topology,
    array ISDs_done: array of ISDs whose inter isd edges have been drawn,
    array ISDs: list of ISDs,
    boolean edge_labels: indicates if edge labels are drawn
    """
    AS_list = ISDs[ISD]["AS"]
    ia = ISD_AS.from_values(ISD, AS)
    id = ia.__str__()
    for neighbor_ISD in AS_list[AS]["inter_n"]:
        # Skip ISDs whose edges were already drawn (avoids duplicates).
        if neighbor_ISD in ISDs_done:
            continue
        for neighbor in AS_list[AS]["inter_n"][neighbor_ISD]:
            # Check if neighbor exists BEFORE indexing into ISDs with it —
            # the original code computed
            # ISDs[neighbor_ISD]["AS"][neighbor] first, which raises
            # KeyError for a referenced-but-missing neighbor instead of
            # skipping it.
            if neighbor_ISD not in ISDs:
                continue
            if neighbor not in ISDs[neighbor_ISD]["AS"]:
                continue
            o_as = ISDs[ISD]["AS"][AS]["inter_n"]
            n_as = ISDs[neighbor_ISD]["AS"][neighbor]["inter_n"]
            n_ia = ISD_AS.from_values(neighbor_ISD, neighbor)
            neighbor_id = n_ia.__str__()
            if edge_labels:
                # Label each end of the edge with the border-router id of
                # that side of the link.
                taillabel = str(o_as[neighbor_ISD][neighbor]["br-id"])
                headlabel = str(n_as[ISD][AS]["br-id"])
                scion_graph.edge(id, neighbor_id, _attributes={
                    'constraint': 'false', 'headlabel': headlabel,
                    'taillabel': taillabel})
            else:
                scion_graph.edge(id, neighbor_id,
                                 _attributes={'constraint': 'false'})
def _get_my_asid():
    """
    Load ISDAS information running on the local machine
    :returns: a list of ISD_AS objects
    """
    found = []
    for isd_dir in os.listdir(GEN_PATH):
        # Gen layout: GEN_PATH/ISD<x>/AS<y>/...
        if 'ISD' not in isd_dir:
            continue
        for as_dir in os.listdir(os.path.join(GEN_PATH, isd_dir)):
            if 'AS' in as_dir:
                found.append(
                    ISD_AS.from_values(int(isd_dir[3:]), int(as_dir[2:])))
    return found
def _generate_toplevel_prom_config(local_gen_path):
    """
    Generates the top level prometheus config file.
    :param str local_gen_path: The gen path of scion-web.
    """
    # Map each prometheus job name to the target files of all ASes.
    jobs = defaultdict(list)
    for as_obj in AD.objects.all():
        ia = ISD_AS.from_values(as_obj.isd_id, as_obj.as_id)
        elem_dir = get_elem_dir(local_gen_path, ia, "")
        for ele_type, target_file in PrometheusGenerator.TARGET_FILES.items():
            path = os.path.join(elem_dir, PrometheusGenerator.PROM_DIR,
                                target_file)
            jobs[PrometheusGenerator.JOB_NAMES[ele_type]].append(path)
    _write_prometheus_config_file(local_gen_path, jobs)
def draw_edges_from_current(self, current_neighbors, graph, node_labels):
    """
    Adds edges for all ASes in current_neighbors
    :param: self,
    array of ASes: list of ASes whose edges we draw,
    graphviz graph: graph to which we add the edges,
    boolean edge_labels: boolean indicating if we add labels to the edges
    :returns: list of ASes to process in the next BFS rotation.
    """
    next_neighbors = []
    for AS in current_neighbors:
        # Mark this AS as handled so its edges are not drawn twice.
        self.ASes_done.append(AS)
        ia = ISD_AS.from_values(self.ISD, AS)
        id = ia.__str__()
        for interface in self.AS_list[AS]["intra_n"]:
            neighbor = self.AS_list[AS]["intra_n"][interface]["n_as"]
            # check if neighbor exists (referenced in interface but AS folder
            # does not exist)
            if neighbor not in self.AS_list:
                continue
            # check if interface connects to an AS we already handled
            if neighbor not in self.ASes_done:
                n_ia = ISD_AS.from_values(self.ISD, neighbor)
                n_id = n_ia.__str__()
                if node_labels:
                    # One distinct color per edge so labels can be matched
                    # to their edge visually.
                    color = self.get_color()
                    # remote is presumably (remote_ifid, remote_port) of the
                    # peer border router — TODO confirm against
                    # get_remote_interface.
                    remote = self.get_remote_interface(neighbor,
                                                       self.AS_list[AS]["intra_n"][interface]["br-ip"],
                                                       self.AS_list[AS]["intra_n"][interface]["br-port"])
                    # HTML-like graphviz labels: <<font ...>> syntax.
                    headlabel = '<<font color="' + color + '">' + str(remote[0]) + ": " + str(remote[1]) + '</font>>'
                    taillabel = '<<font color="' + color + '">' + str(interface) + ": " + \
                        str(self.AS_list[AS]["intra_n"][interface]["br-port"]) + '</font>>'
                    graph.edge(id, n_id, color=color,
                               _attributes={'headlabel': headlabel,
                                            'taillabel': taillabel})
                else:
                    graph.edge(id, n_id)
                # Queue the neighbor for the next rotation if it is not
                # already scheduled.
                if neighbor not in current_neighbors:
                    if neighbor not in next_neighbors:
                        next_neighbors.append(neighbor)
    return next_neighbors
def accept_join_request(request, isd_as, request_id):
    """
    Accepts the join request, assigns a new AS ID to the requesting party
    and creates the certificate.
    This function is only executed by a core AS.

    :param HttpRequest request: POST carries newASname and the joining AS's
        base64-encoded public keys.
    :param isd_as: our own ISD-AS in string form.
    :param request_id: id of the join request being accepted.
    :returns: redirect back to the referring page.
    """
    current_page = request.META.get('HTTP_REFERER')
    coord = get_object_or_404(OrganisationAdmin, user_id=request.user.id)
    logger.info("new AS name = %s isd_as = %s", request.POST['newASname'],
                isd_as)
    joining_as = request.POST['newASname']
    sig_pub_key = from_b64(request.POST['sig_pub_key'])
    enc_pub_key = from_b64(request.POST['enc_pub_key'])
    own_isdas = ISD_AS(isd_as)
    signing_as = AD.objects.get(as_id=own_isdas[1], isd=own_isdas[0])
    signing_as_sig_priv_key = from_b64(signing_as.sig_priv_key)
    signing_as_trc = str(signing_as.trc)
    # Joining AS is placed in our own ISD.
    joining_isdas = ISD_AS.from_values(own_isdas[0], joining_as)
    certificate = Certificate.from_values(
        str(joining_isdas), sig_pub_key, enc_pub_key, str(own_isdas),
        signing_as_sig_priv_key, INITIAL_CERT_VERSION,
    )
    accept_join_dict = {
        "isdas": str(own_isdas),
        "join_reply": {
            "request_id": int(request_id),
            "joining_isdas": str(joining_isdas),
            "signing_isdas": str(own_isdas),
            "certificate": str(certificate),
            "trc": signing_as_trc
        }
    }
    logger.info("accept join dict = %s", accept_join_dict)
    request_url = urljoin(
        COORD_SERVICE_URI,
        posixpath.join(UPLOAD_JOIN_REPLY_SVC, coord.key, coord.secret))
    headers = {'content-type': 'application/json'}
    # Best-effort upload: failure is logged but the user is still redirected.
    try:
        requests.post(request_url, json=accept_join_dict, headers=headers)
    except requests.RequestException:
        logger.error("Failed to upload join reply to coordination service")
    return redirect(current_page)
def draw_node_without_attributes(self, AS, core, graph, location_labels, labels):
    """
    Adds a node without any attributes to the graph.
    :param: self, string AS: AS ID, boolean core: indicates if the AS is core
    graphviz graph: graph to which we add the AS
    dict labels: Dictionary containing labels for ISDs and ASes
    """
    node_id = str(ISD_AS.from_values(self.ISD, AS))
    # Display label: "<isd-as><name>", optionally tagged as core and with a
    # location label on a second line.
    display = node_id + self.AS_list[AS]["name"]
    if core:
        display = display + " (core)"
    if location_labels and node_id in labels['AS']:
        display = display + '\n' + labels['AS'][node_id]
    graph.node(node_id, display, _attributes={'shape': 'box'})
def _generate_toplevel_prom_config(local_gen_path):
    """
    Generates the top level prometheus config file.
    :param str local_gen_path: The gen path of scion-web.
    """
    # Collect, per prometheus job, the target files of every AS.
    job_dict = defaultdict(list)
    for as_obj in AD.objects.all():
        ia = ISD_AS.from_values(as_obj.isd_id, as_obj.as_id)
        base = get_elem_dir(local_gen_path, ia, "")
        for ele_type, target_file in PrometheusGenerator.TARGET_FILES.items():
            job_name = PrometheusGenerator.JOB_NAMES[ele_type]
            job_dict[job_name].append(
                os.path.join(base, PrometheusGenerator.PROM_DIR, target_file))
    _write_prometheus_config_file(local_gen_path, job_dict)
def _check_trc_reqs(self):
    """
    Checks if TRC requests timeout and resends requests if so.
    """
    # The whole scan runs under the lock; entries are only overwritten
    # (same keys), so iterating .items() while updating values is safe.
    with self.req_trcs_lock:
        now = time.time()
        for (isd, ver), (req_time, meta) in self.requested_trcs.items():
            if now - req_time >= self.TRC_CC_REQ_TIMEOUT:
                # Re-request from the same meta the original request used.
                trc_req = TRCRequest.from_values(ISD_AS.from_values(
                    isd, 0), ver, cache_only=True)
                logging.info("Re-Requesting TRC from %s: %s", meta,
                             trc_req.short_desc())
                self.send_meta(trc_req, meta)
                # Reset the timestamp so the timeout window restarts.
                self.requested_trcs[(isd, ver)] = (time.time(), meta)
        if self._labels:
            # Export the number of outstanding TRC requests as a metric.
            PENDING_TRC_REQS_TOTAL.labels(**self._labels).set(
                len(self.requested_trcs))
def _get_new_br_obj(new_neighbor, topo):
    """
    Initiating border router objects to create new border router entity

    :param dict new_neighbor: connection info for the new neighbor (BRID,
        RemotePort, LocalPort, NeighborIP, NeighborISD, NeighborAS, Linktype).
    :param dict topo: current AS topology.
    :returns: new border router id, border router port, interface id,
        external port, neighbor address, external address, link type,
        internal port, internal address and neighbor ISD-AS string.
    """
    br_id = []
    if_id = []
    br_n = ""
    br_port = []
    # Walk existing BRs to learn the naming prefix and the ports in use.
    # NOTE(review): br_id and if_id are collected but never used below —
    # possibly dead code; confirm before removing.
    for br_name, br in topo['BorderRouters'].items():
        br_id.append(int(br_name.split('-')[2]))
        # Name prefix "<isd>-<as>" shared by all BRs of this AS.
        br_n = br_name.split('-')[0] + "-" + br_name.split('-')[1]
        br_port.append(br['InternalAddrs'][0]['Public'][0]['L4Port'])
        for ifid, intf in br['Interfaces'].items():
            if_id.append(int(ifid))
    new_br_id = '%s-%s' % (br_n, new_neighbor["BRID"])
    new_if_id = new_neighbor["BRID"]
    external_port = new_neighbor["RemotePort"]
    neighbor_addr = new_neighbor["NeighborIP"]
    ext_addr = get_credentials()["IP"]
    # Map the coordinator's link type constants onto topology strings.
    if new_neighbor["Linktype"] == CHILD:
        linktype = "CHILD"
    elif new_neighbor["Linktype"] == PARENT:
        linktype = "PARENT"
    else:
        linktype = "CORE"
    internal_port = new_neighbor["LocalPort"]
    # First IPv4 address of the configured interface.
    int_addr = ni.ifaddresses(INTERFACE)[ni.AF_INET][0]['addr']
    ia = ISD_AS.from_values(new_neighbor["NeighborISD"],
                            new_neighbor["NeighborAS"])
    ia = ia.__str__()
    # Pick the lowest internal port not already taken by an existing BR.
    new_br_port = _get_lowest_empty_id(br_port)
    return new_br_id, new_br_port, new_if_id, external_port, neighbor_addr, \
        ext_addr, linktype, internal_port, int_addr, ia
def prep_con_req_dict(con_req, isd_id, as_id):
    """
    Prepares the connection request as a dictionary to be sent to the SCION
    coordination service.
    :param ConnectionRequest con_req: Connection request object.
    :param isd_id: ISD id of the requesting AS.
    :param as_id: AS id of the requesting AS.
    :returns: Connection request as a dictionary.
    :rtype: dict
    """
    isd_as = ISD_AS.from_values(isd_id, as_id)
    as_obj = get_object_or_404(AD, isd_id=isd_id, as_id=as_id)
    cert_chain = CertificateChain.from_raw(as_obj.certificate)
    con_req_dict = {
        "RequestId": con_req.id,
        "Info": con_req.info,
        "RequestIA": str(isd_as),
        "RespondIA": con_req.connect_to,
        "IP": con_req.router_public_ip,
        "OverlayType": con_req.overlay_type,
        "MTU": int(con_req.mtu),
        "Bandwidth": int(con_req.bandwidth),
        "Timestamp": iso_timestamp(time.time()),
        "Signature": "",  # TODO(ercanucan): generate and set the signature
        "Certificate": cert_chain.to_json()
    }
    # Port is optional; only include it when set.
    if con_req.router_public_port:
        con_req_dict["Port"] = int(con_req.router_public_port)
    # Adjust the link type for the receiving party (i.e if the requestIA
    # wants to have the respondIA as a PARENT, then the respondIA should
    # see it as a request to have a CHILD AS.
    if con_req.link_type == LinkType.PARENT:
        con_req_dict["LinkType"] = LinkType.CHILD
    elif con_req.link_type == LinkType.CHILD:
        con_req_dict["LinkType"] = LinkType.PARENT
    else:
        con_req_dict["LinkType"] = con_req.link_type
    return con_req_dict
def _send_pkt(self, spkt):
    """
    Rewrite the packet's destination ISD-AS to 3-33, then send it to its
    first hop.
    """
    hop_addr, hop_port = self.sd.get_first_hop(spkt)
    # Destination is hard-wired to ISD 3, AS 33.
    spkt.addrs.dst.isd_as = ISD_AS.from_values(3, 33)
    self._send_raw_pkt(spkt.pack(), hop_addr, hop_port)
def isd_as(self):
    """Return this object's ISD-AS with the AS component zeroed out."""
    isd = self.p.isd
    return ISD_AS.from_values(isd, 0)
def get_context_data(self, **kwargs):
    """
    Populate 'context' dictionary with the required objects
    """
    context = super().get_context_data(**kwargs)
    ad = context['object']
    # Status tab
    context['routers'] = ad.routerweb_set.select_related()
    context['path_servers'] = ad.pathserverweb_set.all()
    context['certificate_servers'] = ad.certificateserverweb_set.all()
    context['beacon_servers'] = ad.beaconserverweb_set.all()
    context['sibra_servers'] = ad.sibraserverweb_set.all()
    context['management_interface_ip'] = get_own_local_ip()
    context['reloaded_topology'] = ad.original_topology
    flat_string = json.dumps(ad.original_topology, sort_keys=True)
    # hash for non cryptographic purpose (state comparison for user warning)
    context['reloaded_topology_hash'] = \
        hashlib.md5(flat_string.encode('utf-8')).hexdigest()
    context['as_id'] = ad.as_id
    context['isd_id'] = ad.isd_id
    context['isdas'] = str(ISD_AS.from_values(ad.isd_id, ad.as_id))
    # Sort by name numerically
    lists_to_sort = [
        'routers', 'path_servers', 'certificate_servers', 'beacon_servers',
        'sibra_servers'
    ]
    for list_name in lists_to_sort:
        # Elements with no name sort first (key -1).
        context[list_name] = sorted(
            context[list_name],
            key=lambda el: el.name if el.name is not None else -1)
    # Permissions
    context['user_has_perm'] = self.request.user.has_perm('change_ad', ad)
    # Connection requests tab
    context['join_requests'] = {}
    context['received_requests'] = {}
    try:
        coord = OrganisationAdmin.objects.get(user_id=self.request.user.id)
    except OrganisationAdmin.DoesNotExist:
        # Without coordinator credentials we cannot poll — return what we
        # have so far.
        logger.error("Retrieving key and secret failed!!.")
        return context
    request_url = urljoin(
        COORD_SERVICE_URI,
        posixpath.join(POLL_EVENTS_SVC, coord.key, coord.secret))
    headers = {'content-type': 'application/json'}
    # Best-effort poll of the coordination service for pending requests.
    try:
        r = requests.post(request_url, json={'isdas': context['isdas']},
                          headers=headers)
        if r.status_code == 200:
            answer = r.json()
            context['join_requests'] = answer['join_requests']
            context['received_requests'] = answer['conn_requests']
            logger.info("join requests = %s", context['join_requests'])
            logger.info("conn requests = %s", context['received_requests'])
    except requests.RequestException:
        logger.info("Retrieving requests from scion-coord failed.")
    return context
def get_context_data(self, **kwargs):
    """
    Populate 'context' dictionary with the required objects
    """
    context = super().get_context_data(**kwargs)
    ad = context['object']
    # Status tab
    context['services'] = ad.service_set.select_related()
    context['service_addrs'] = ad.serviceaddress_set.select_related()
    context['border_routers'] = ad.borderrouter_set.select_related()
    context['router_addrs'] = ad.borderrouteraddress_set.select_related()
    context['interface_addrs'] = ad.borderrouterinterface_set.select_related()
    context['management_interface_ip'] = get_own_local_ip()
    context['reloaded_topology'] = ad.original_topology
    flat_string = json.dumps(ad.original_topology, sort_keys=True)
    # hash for non cryptographic purpose (state comparison for user warning)
    context['reloaded_topology_hash'] = \
        hashlib.md5(flat_string.encode('utf-8')).hexdigest()
    context['as_id'] = ad.as_id
    context['isd_id'] = ad.isd_id
    context['isdas'] = str(ISD_AS.from_values(ad.isd_id, ad.as_id))
    # Sort by name numerically
    for list_name in ['services', 'border_routers']:
        # Elements with no name sort first (key -1).
        context[list_name] = sorted(
            context[list_name],
            key=lambda el: el.name if el.name is not None else -1
        )
    # Sort by address numerically
    for list_name in ['service_addrs', 'router_addrs', 'interface_addrs']:
        context[list_name] = sorted(
            context[list_name],
            key=lambda el: el.addr if el.addr is not None else -1
        )
    # Permissions
    context['user_has_perm'] = self.request.user.has_perm('change_ad', ad)
    # Connection requests tab
    context['join_requests'] = {}
    context['received_requests'] = {}
    context['received_conn_replies'] = {}
    try:
        coord = OrganisationAdmin.objects.get(user_id=self.request.user.id)
    except OrganisationAdmin.DoesNotExist:
        # Without coordinator credentials we cannot poll — return what we
        # have so far.
        logger.error("Retrieving account_id and secret failed!!.")
        return context
    request_url = urljoin(COORD_SERVICE_URI, posixpath.join(
        POLL_EVENTS_SVC, coord.account_id, coord.secret))
    logger.info("Polling Events for %s", context['isdas'])
    r, error = post_req_to_scion_coord(
        request_url, {'IsdAs': context['isdas']},
        "poll events for ISD-AS %s" % context['isdas'])
    if error:
        # Poll failed — surface the error to the user and return partial
        # context.
        messages.error(self.request, 'Could not poll events from SCION '
                       'Coordination Service!')
        return context
    resp = r.json()
    context['join_requests'] = resp['JoinRequests']
    context['received_requests'] = resp['ConnRequests']
    context['received_conn_replies'] = resp['ConnReplies']
    return context