def get_neighbors(self):
    """
    Collect neighbor information from the border-router entries of
    ``self.topology``.

    :returns: Nested dictionary with ``'intra'`` and ``'inter'`` ISD
        neighbor dictionaries, each keyed by interface id. Every entry
        carries the neighbor AS id plus the local (``br-*``) and remote
        (``remote-*``) underlay address/port of the link; inter-ISD
        entries additionally carry ``'n_isd'``.
    """
    intra_dict = {}
    inter_dict = {}
    border_routers = self.topology["BorderRouters"]
    for br in border_routers:
        # Look the neighbor entry up once per border router instead of twice.
        neighbor = self.get_neighbor_IA_interface(br)
        if_id = neighbor['interface']
        n_IA = ISD_AS(neighbor['IA'])
        n_isd = n_IA.isd_str()
        n_as = n_IA.as_file_fmt()
        iface = border_routers[br]["Interfaces"][if_id]
        link = {
            'n_as': n_as,
            'br-ip': iface["PublicOverlay"]["Addr"],
            'br-port': iface["PublicOverlay"]["OverlayPort"],
            'remote-ip': iface["RemoteOverlay"]["Addr"],
            # BUGFIX: the inter-ISD branch previously reported the local
            # PublicOverlay port as 'remote-port'; both branches now use
            # the RemoteOverlay port, matching the key's meaning.
            'remote-port': iface["RemoteOverlay"]["OverlayPort"],
        }
        if n_isd == self.ISD:
            intra_dict[if_id] = link
        else:
            link['n_isd'] = n_isd
            inter_dict[if_id] = link
    return {'intra': intra_dict, 'inter': inter_dict}
def create_as_obj(all_ases, as_credentials):
    """Build an AS_Object for every AS listed in *all_ases*.

    :param all_ases: iterable of ISD-AS identifier strings.
    :param as_credentials: mapping of ISD-AS string to a credential dict
        with 'certs' and 'keys' sub-dictionaries.
    :returns: dict mapping each ISD-AS string to its populated AS_Object.
    """
    result = {}
    for ia_str in all_ases:
        ia = ISD_AS(ia_str)
        creds = as_credentials[ia_str]
        obj = AS_Object()
        obj.isd_id = ia.isd_str()
        obj.as_id = ia.as_str()
        # TRC and certificate are serialized to JSON strings.
        trc_name = 'ISD%s-V1.trc' % ia.isd_str()
        cert_name = 'ISD%s-AS%s-V1.crt' % (ia.isd_str(), ia.as_file_fmt())
        obj.trc = json.dumps(creds['certs'][trc_name])
        obj.certificate = json.dumps(creds['certs'][cert_name])
        # Per-AS key material.
        obj.keys = {
            'sig_key': creds['keys']['as-sig.seed'],
            'enc_key': creds['keys']['as-decrypt.key'],
            'master0_as_key': creds['keys']['master0.key'],
            'master1_as_key': creds['keys']['master1.key'],
        }
        # Only core ASes carry the core/online/offline signing keys.
        if 'core-sig.seed' in creds['keys']:
            obj.core_keys = {
                'core_sig_key': creds['keys']['core-sig.seed'],
                'online_key': creds['keys']['online-root.seed'],
                'offline_key': creds['keys']['offline-root.seed'],
            }
        result[ia_str] = obj
    return result
def load_topology(self, asid):
    """
    Reload the current topology configuration.

    :param str asid: target asid (e.g., '1-11')
    :returns: credentials (keys, trc, cert) and topology dictionary
        for the given AS
    """
    ia = ISD_AS(asid)
    # Older ISD_AS implementations lack as_file_fmt(); fall back to the
    # raw AS component in that case.
    as_str = ia.as_file_fmt() if hasattr(ia, 'as_file_fmt') else ia[1]
    as_path = 'ISD%s/AS%s' % (ia[0], as_str)
    process_path = self._get_process_path(
        os.path.join(self.gen_path, as_path))
    try:
        with open(os.path.join(process_path, 'topology.json')) as topo_file:
            topo_dict = json.load(topo_file)
        with open(os.path.join(process_path, 'keys/as-sig.seed')) as sig_file:
            sig_priv_key = sig_file.read()
        with open(os.path.join(process_path, 'keys/as-decrypt.key')) as enc_file:
            enc_priv_key = enc_file.read()
        with open(os.path.join(process_path, 'keys/master0.key')) as master0_file:
            master0_as_key = master0_file.read()
        with open(os.path.join(process_path, 'keys/master1.key')) as master1_file:
            master1_as_key = master1_file.read()
        # Pick the first file in reverse-sorted order -- presumably the
        # highest-versioned cert/TRC under lexicographic naming (TODO
        # confirm version numbers never roll past one digit).
        # (The original computed the TRC glob twice and stored one copy
        # in an unused 'files' variable; that dead code is removed.)
        cert_path = sorted(
            glob.glob(os.path.join(process_path, 'certs/*.crt')),
            reverse=True)[0]
        with open(cert_path) as cert_file:
            certificate = cert_file.read()
        trc_path = sorted(
            glob.glob(os.path.join(process_path, 'certs/*.trc')),
            reverse=True)[0]
        with open(trc_path) as trc_file:
            trc = trc_file.read()
    except OSError as e:
        print("[ERROR] Unable to open '%s': \n%s" % (e.filename, e.strerror))
        exit(1)
    key_dict = {
        'enc_key': enc_priv_key,
        'sig_key': sig_priv_key,
        'master0_as_key': master0_as_key,
        'master1_as_key': master1_as_key,
    }
    as_obj = ASCredential(certificate, trc, key_dict)
    return as_obj, topo_dict
def load_topology(self, asid):
    """
    Reload the current topology configuration.

    :param str asid: target asid (e.g., '1-11')
    :returns: credentials (keys, trc, cert) and topology dictionary
        for the given AS
    """
    ia = ISD_AS(asid)
    # Older ISD_AS implementations lack as_file_fmt(); fall back to the
    # raw AS component in that case. hasattr() replaces the slower,
    # unidiomatic "'as_file_fmt' in dir(ia)" check.
    as_str = ia.as_file_fmt() if hasattr(ia, 'as_file_fmt') else ia[1]
    as_path = 'ISD%s/AS%s' % (ia[0], as_str)
    process_path = self._get_process_path(
        os.path.join(self.gen_path, as_path))
    try:
        with open(os.path.join(process_path, 'topology.json')) as topo_file:
            topo_dict = json.load(topo_file)
        with open(os.path.join(process_path, 'keys/as-sig.seed')) as sig_file:
            sig_priv_key = sig_file.read()
        with open(os.path.join(process_path, 'keys/as-sig.key')) as sig_file:
            sig_priv_key_raw = sig_file.read()
        with open(os.path.join(process_path, 'keys/as-decrypt.key')) as enc_file:
            enc_priv_key = enc_file.read()
        # Initial-version cert/TRC names are deterministic, so no glob
        # is needed here (unlike the legacy loader).
        with open(
                os.path.join(
                    process_path,
                    'certs/ISD%s-AS%s-V%s.crt' %
                    (ia[0], as_str, INITIAL_CERT_VERSION))) as cert_file:
            certificate = cert_file.read()
        with open(
                os.path.join(
                    process_path,
                    'certs/ISD%s-V%s.trc' %
                    (ia[0], INITIAL_TRC_VERSION))) as trc_file:
            trc = trc_file.read()
        with open(os.path.join(process_path, 'as.yml')) as conf_file:
            master_as_key = self._get_masterkey(conf_file)
    except OSError as e:
        print("[ERROR] Unable to open '%s': \n%s" % (e.filename, e.strerror))
        exit(1)
    key_dict = {
        'enc_key': enc_priv_key,
        'sig_key': sig_priv_key,
        'sig_key_raw': sig_priv_key_raw,
        'master_as_key': master_as_key,
    }
    as_obj = ASCredential(certificate, trc, key_dict)
    return as_obj, topo_dict
def deploy_gen(src_path, deploy_plan):
    """Copy gen folder to remote machines.

    :param src_path: local gen directory to deploy from.
    :param deploy_plan: mapping of remote address to the list of ISD-AS
        strings that machine should receive.
    """
    for target_addr, ases in deploy_plan.items():
        print("[INF] ======== Deploying new gen folder =========")
        print("Target: %s" % target_addr)
        # Start from a freshly initialized gen directory on the remote host.
        init_remote_dir(target_addr, TARGET_PATH)
        # Shared pieces (dispatcher and overlay config) go to every host.
        remote_root = '%s:%s/.' % (target_addr, TARGET_PATH)
        copy_remote(os.path.join(src_path, 'dispatcher'), remote_root)
        copy_remote(os.path.join(src_path, 'overlay'), remote_root)
        # Then each AS subtree assigned to this machine.
        for ia_str in ases:
            ia = ISD_AS(ia_str)
            rel_path = 'ISD%s/AS%s' % (ia.isd_str(), ia.as_file_fmt())
            remote_as_dir = os.path.join(TARGET_PATH, rel_path)
            init_remote_dir(target_addr, remote_as_dir)
            copy_remote(os.path.join(src_path, rel_path, '*'),
                        '%s:%s' % (target_addr, remote_as_dir))
def index(request):
    '''
    Main index handler for index.html for main visualization page.
    Validates parameters, request scion data, returns formatted response.
    :param request: HTML request object containing url parameters.
    '''
    p = {}  # return param dictionary passed to the template
    p['tab'] = set_param(request, 'tab', 'tab-pathtopo')
    p['data'] = set_param(request, 'data', 'sdapi')
    p['addr'] = set_param(request, 'addr', '')
    p['src'] = set_param(request, 'src', '')
    p['dst'] = set_param(request, 'dst', '')
    p['mp'] = set_param(request, 'mp', '5')
    p['err'] = ''
    if (p['src'] == '' and p['dst'] == ''):
        # use endhost gen/ia if no host specified
        if (p['src'] == ''):
            ia_file = "%s/%s/ia" % (SCION_ROOT, GEN_PATH)
            try:
                with open(ia_file, 'r') as fin:
                    # load and reformat
                    p['src'] = str(ISD_AS(fin.read().strip()))
            except (FileNotFoundError) as err:
                # No local ia file either -> cannot determine a source AS.
                logging.warning("%s: %s" % (err.__class__.__name__, err))
                return fmt_err(request, p)
    s_isd_as = ISD_AS(p['src'])
    d_isd_as = ISD_AS(p['dst'])
    p['src'], p['dst'] = str(s_isd_as), str(d_isd_as)  # reformat
    # NOTE(review): chained assignment binds all three names to ONE list
    # object; harmless here because each is only rebound (never mutated)
    # below, but worth keeping in mind when editing.
    csegs = dsegs = usegs = []
    paths = ''
    logging.info("Requesting sciond data from %s to %s" %
                 (s_isd_as, d_isd_as))
    conf_dir = "%s/%s/ISD%s/AS%s/endhost" % (
        SCION_ROOT, GEN_PATH, s_isd_as.isd_str(), s_isd_as.as_file_fmt())
    # Prefer the per-AS sciond socket; fall back to the default socket
    # when the per-AS one does not exist.
    sock_file = get_default_sciond_path(s_isd_as)
    if not pathlib.Path(sock_file).exists():
        sock_file = get_default_sciond_path(None)
    try:
        if (p['data'] == 'sdapi'):
            connector[s_isd_as] = lib_sciond.init(sock_file)
            logging.info(connector[s_isd_as]._api_addr)
            try:
                # test if sciond is already running for this AS
                logging.info("Testing sciond at %s" % sock_file)
                lib_sciond.get_as_info(connector=connector[s_isd_as])
            except (SCIONDResponseError) as err:
                # sciond answered with an error -> report it to the user.
                p['err'] = "%s: %s" % (err.__class__.__name__, err)
                return fmt_err(request, p)
            except (SCIONDConnectionError, FileNotFoundError) as err:
                logging.warning("%s: %s" % (err.__class__.__name__, err))
                # need to launch sciond, wait for uptime
                launch_sciond(sock_file, conf_dir, p['addr'], s_isd_as)
            if (p['dst'] != ''):
                # PATHS: only query paths when a destination was given.
                try:
                    # get paths and keep segments
                    flags = lib_sciond.PathRequestFlags(
                        flush=False, sibra=False)
                    paths = lib_sciond.get_paths(
                        d_isd_as, max_paths=int(p['mp']), flags=flags,
                        connector=connector[s_isd_as])
                    csegs = lib_sciond.get_segtype_hops(
                        PST.CORE, connector=connector[s_isd_as])
                    dsegs = lib_sciond.get_segtype_hops(
                        PST.DOWN, connector=connector[s_isd_as])
                    usegs = lib_sciond.get_segtype_hops(
                        PST.UP, connector=connector[s_isd_as])
                    # refresh old segments for next call
                    flags = lib_sciond.PathRequestFlags(
                        flush=True, sibra=False)
                    lib_sciond.get_paths(
                        d_isd_as, max_paths=int(p['mp']), flags=flags,
                        connector=connector[s_isd_as])
                except (SCIONDResponseError, SCIONDConnectionError,
                        AttributeError) as err:
                    # AttributeError handles backward-compatability
                    logging.error("%s: %s" % (err.__class__.__name__, err))
                    p['err'] = str(err)
            p['json_as_topo'] = json.dumps(
                get_json_as_topology_sciond(connector[s_isd_as], paths))
            p['json_trc'] = ("TRC information for sciond not yet implemented.")
            p['json_crt'] = (
                "Certificate information for sciond not yet implemented.")
        elif (p['data'] == 'file'):
            # Static mode: read topology/certs straight from the gen dir.
            t = Topology.from_file(os.path.join(conf_dir, TOPO_FILE))
            topo = organize_topo(t)
            p['json_as_topo'] = json.dumps(get_json_as_topology(t, topo))
            p['json_trc'] = html_jsonfile(findCerts(conf_dir, ".trc"))
            p['json_crt'] = html_jsonfile(findCerts(conf_dir, ".crt"))
        # Shared rendering of whatever paths/segments were collected above
        # (empty defaults when no destination or 'file' mode was used).
        p['path_info'] = get_as_view_html(paths, csegs, usegs, dsegs)
        p['json_path_topo'] = json.dumps(
            get_json_path_segs(paths, csegs, usegs, dsegs))
        p['json_seg_topo'] = json.dumps(
            get_json_all_segments(csegs, usegs, dsegs))
        p['json_paths'] = json.dumps(get_json_paths(paths))
    except (SCIONBaseError) as err:
        p['err'] = "%s: %s" % (err.__class__.__name__, err)
        return fmt_err(request, p)
    return render(request, 'asviz/index.html', p)