def create_local_gen(topologies, as_objs, exp_path, bf=5, bs=5):
    """
    Creates the usual gen folder structure under exp_path for every AS
    in the given topologies.

    :param dict topologies: topology dicts keyed by ISD-AS string.
    :param dict as_objs: crypto/config objects keyed by ISD-AS string.
    :param str exp_path: output directory for the generated gen folder.
    :param int bf: forwarded to write_as_conf_and_path_policy
        (presumably a path-policy parameter — TODO confirm).
    :param int bs: forwarded to write_as_conf_and_path_policy
        (presumably a path-policy parameter — TODO confirm).
    """
    for _as, topo in topologies.items():
        isd_as = TopoID(_as)
        as_obj = as_objs[_as]
        write_dispatcher_config(exp_path)
        # Recreate the per-AS directory from scratch.
        as_path = get_elem_dir(exp_path, isd_as, "")
        rmtree(as_path, True)
        write_toml_files(topo, isd_as, exp_path)
        # Sciond configs are generated separately below; drop the entry so it
        # is not treated as a regular service.
        if topo.pop('Sciond', None) is None:
            print("[ERR] 'Sciond' not found in topology")
        for service_type, type_key in TYPES_TO_KEYS.items():
            executable_name = TYPES_TO_EXECUTABLES[service_type]
            for instance_name in topo[type_key]:
                config = prep_supervisord_conf(topo[type_key][instance_name],
                                               executable_name, service_type,
                                               instance_name, isd_as)
                instance_path = get_elem_dir(exp_path, isd_as, instance_name)
                write_certs_trc_keys(isd_as, as_obj, instance_path)
                write_as_conf_and_path_policy(isd_as, as_obj, instance_path,
                                              bf, bs)
                write_supervisord_config(config, instance_path)
                write_topology_file(topo, type_key, instance_path)
                write_zlog_file(service_type, instance_name, instance_path)
        generate_sciond_config(isd_as, as_obj, topo, exp_path)
        write_overlay_config(exp_path)
def _gen_as_certs(self, topo_id, as_conf):
    """Create the AS certificate (and, for core ASes, the core AS
    certificate) for topo_id and store them on self."""
    # An AS without an explicit 'cert_issuer' is self-issued.
    issuer = TopoID(as_conf.get('cert_issuer', str(topo_id)))
    # Only core ASes have online root keys, so this rejects non-core issuers.
    if issuer not in self.pub_online_root_keys:
        raise SCIONParseError("Certificate issuer is not a core AS: %s" % issuer)
    if self.is_core(as_conf):
        # Core ASes additionally get an issuing certificate, signed with
        # their own online root key.
        self.core_certs[topo_id] = Certificate.from_values(
            str(topo_id), str(issuer), INITIAL_TRC_VERSION, INITIAL_CERT_VERSION,
            "Core AS Certificate", True, DEFAULT_CORE_CERT_VALIDITY,
            self.enc_pub_keys[topo_id], self.pub_core_sig_keys[topo_id],
            self.priv_online_root_keys[topo_id]
        )
    # Every AS gets a leaf certificate signed by the issuer's core signing key.
    self.certs[topo_id] = Certificate.from_values(
        str(topo_id), str(issuer), INITIAL_TRC_VERSION, INITIAL_CERT_VERSION,
        "AS Certificate", False, DEFAULT_LEAF_CERT_VALIDITY,
        self.enc_pub_keys[topo_id], self.sig_pub_keys[topo_id],
        self.priv_core_sig_keys[issuer],
        issuing_time=int(time.time()) + 2,
    )
def connection_request_action(request, con_req_id):
    """
    Responds to the received connection request with a connection reply.
    :param HttpRequest request: Django HTTP request.
    :param str con_req_id: The ID of the connection request
    :returns: HttpResponse depending on the outcome of sending the connection reply
    :rtype: HttpResponse
    """
    posted = request.POST
    reply_ia = TopoID(posted['RespondIA'])
    reply_as = get_object_or_404(AD, isd=reply_ia[0], as_id=reply_ia[1])
    _check_user_permissions(request, reply_as)
    if '_approve_request' in posted:
        send_connection_reply(request, con_req_id, REQ_APPROVED, reply_as, posted)
        target = reverse('ad_detail_topology_routers', args=[reply_as.as_id])
    elif '_decline_request' in posted:
        send_connection_reply(request, con_req_id, REQ_DECLINED, reply_as, posted)
        target = reverse('ad_connection_requests', args=[reply_as.as_id])
    else:
        # Neither approve nor decline was posted.
        return HttpResponseNotFound('Invalid connection request action')
    return redirect(target)
def prep_approved_join_reply(request, join_rep_dict, own_isdas, own_as_obj):
    """
    Prepares the join reply for the APPROVED case: creates a certificate
    for the joining AS and fills join_rep_dict in place with both
    certificate chains and the TRC.
    """
    new_as_id = request.POST['newASId']
    logger.info("New AS ID = %s", new_as_id)
    core_flag = request.POST['join_as_a_core']
    joining_ia = TopoID.from_values(own_isdas[0], new_as_id)
    if core_flag.lower() == "true":
        validity, comment = Certificate.CORE_AS_VALIDITY_PERIOD, "Core AS Certificate"
    else:
        validity, comment = Certificate.AS_VALIDITY_PERIOD, "AS Certificate"
    cert = Certificate.from_values(
        str(joining_ia), str(own_isdas), INITIAL_TRC_VERSION,
        INITIAL_CERT_VERSION, comment, core_flag, validity,
        from_b64(request.POST['enc_pub_key']),
        from_b64(request.POST['sig_pub_key']),
        SigningKey(from_b64(own_as_obj.sig_priv_key))
    )
    # The joining AS's chain is its new cert plus our core cert.
    respond_ia_chain = CertificateChain.from_raw(own_as_obj.certificate)
    request_ia_chain = CertificateChain([cert, respond_ia_chain.core_as_cert])
    join_rep_dict['JoiningIA'] = str(joining_ia)
    join_rep_dict['IsCore'] = core_flag.lower() == "true"
    join_rep_dict['RespondIA'] = str(own_isdas)
    join_rep_dict['JoiningIACertificate'] = request_ia_chain.to_json()
    join_rep_dict['RespondIACertificate'] = respond_ia_chain.to_json()
    join_rep_dict['TRC'] = TRC.from_raw(own_as_obj.trc).to_json()
    logger.debug("Accepting Join Request = %s", join_rep_dict)
def send_join_reply(request, status, isd_as, request_id):
    """
    Accepts or declines the join request. In case of accept, it assigns a new
    AS ID to the requesting party and creates the certificate.
    This function is only executed by a core AS.

    :param HttpRequest request: Django HTTP request.
    :param str status: reply status (e.g. REQ_APPROVED).
    :param str isd_as: ISD-AS string of the responding (core) AS.
    :param request_id: ID of the join request being answered.
    :returns: redirect back to the referring page, or the error response
        from the coordination service.
    """
    current_page = request.META.get('HTTP_REFERER')
    coord = get_object_or_404(OrganisationAdmin, user_id=request.user.id)
    own_isdas = TopoID(isd_as)
    own_as_obj = AD.objects.get(as_id=own_isdas[1], isd=own_isdas[0])
    if not own_as_obj.is_core_ad:
        # Fix: use the module-level logger with lazy %-args (was root
        # `logging` with eager formatting), consistent with the rest of
        # this file.
        logger.error("%s has to be a core AS to send join reply", own_as_obj)
        return redirect(current_page)
    join_rep_dict = {
        'RequestId': int(request_id),
        'Status': status,
        'RespondIA': str(own_isdas),
        'RequesterId': request.POST['requester']
    }
    if status == REQ_APPROVED:
        prep_approved_join_reply(request, join_rep_dict, own_isdas, own_as_obj)
    else:
        logger.debug("Declining Join Request = %s", join_rep_dict)
    # Upload the reply to the SCION coordination service.
    request_url = urljoin(COORD_SERVICE_URI, posixpath.join(
        UPLOAD_JOIN_REPLY_SVC, coord.account_id, coord.secret))
    _, error = post_req_to_scion_coord(
        request_url, join_rep_dict, "join reply %s" % request_id)
    if error:
        return error
    return redirect(current_page)
def write_topology(self, asid, as_obj, tp):
    """
    Creates the usual gen folder structure for an ISD/AS under gen
    :param str asid: ISD-AS as a string
    :param obj as_obj: An object that stores crypto information for AS
    :param dict tp: the topology parameter file as a dict of dicts
    """
    ia = TopoID(asid)
    # Start from a clean per-AS directory.
    rmtree(get_elem_dir(self.gen_path, ia, ""), True)
    # functions from $SC/python/topology use relative paths
    os.chdir(os.path.dirname(self.gen_path))
    write_dispatcher_config(self.gen_path)
    write_toml_files(tp, ia)
    for svc_type, key in TYPES_TO_KEYS.items():
        if key not in tp:
            continue
        exe = TYPES_TO_EXECUTABLES[svc_type]
        for inst_name in tp[key]:
            conf = prep_supervisord_conf(tp[key][inst_name], exe, svc_type,
                                         inst_name, ia)
            inst_path = get_elem_dir(self.gen_path, ia, inst_name)
            write_certs_trc_keys(ia, as_obj, inst_path)
            write_as_conf_and_path_policy(ia, as_obj, inst_path)
            write_supervisord_config(conf, inst_path)
            write_topology_file(tp, key, inst_path)
            write_zlog_file(svc_type, inst_name, inst_path)
    # We don't need to create zk configration for existing ASes
    # generate_zk_config(tp, ia, GEN_PATH, simple_conf_mode=False)
    generate_sciond_config(ia, as_obj, tp, self.gen_path)
    generate_prom_config(ia, tp, self.gen_path)
def _build_chains(self):
    """Assemble each AS's certificate chain and record its public signing
    key in the issuer's customers mapping."""
    for topo_id, leaf_cert in self.certs.items():
        issuer = TopoID(leaf_cert.issuer)
        # A chain is the leaf cert followed by the issuer's core cert.
        chain = CertificateChain([leaf_cert, self.core_certs[issuer]])
        cert_path = get_cert_chain_file_path("", topo_id, INITIAL_CERT_VERSION)
        self.cert_files[topo_id][cert_path] = chain.to_json()
        assert isinstance(topo_id, TopoID)
        map_path = os.path.join("customers", '%s-%s-V%d.key' % (
            topo_id.ISD(), topo_id.AS_file(), INITIAL_CERT_VERSION))
        encoded_key = base64.b64encode(self.sig_pub_keys[topo_id]).decode()
        self.cust_files[issuer][map_path] = encoded_key
def add_to_topology(request):
    """
    Adds the router information which comes with a connection reply
    into the topology of the AS.

    :param HttpRequest request: Django HTTP request whose body is the JSON
        connection reply.
    :returns: HttpResponse on success, HttpResponseNotFound when the
        connection request, router or AS cannot be located.
    """
    con_reply = json.loads(request.body.decode('utf-8'))
    # find the corresponding connection request from DB
    try:
        con_req = ConnectionRequest.objects.get(id=con_reply['RequestId'])
    except ConnectionRequest.DoesNotExist:
        logger.error("Connection request for reply with ID %s not found",
                     con_reply['RequestId'])
        return HttpResponseNotFound("Connection request for reply %s not found"
                                    % con_reply['RequestId'])
    # find the corresponding router
    ip = con_req.router_public_ip
    port = con_req.router_public_port
    try:
        router_intf = BorderRouterInterface.objects.get(addr=ip, l4port=port)
        router_addr = router_intf.router_addr
        router = router_addr.router
    # Fix: the .get() above raises BorderRouterInterface.DoesNotExist, which
    # the previous handler (BorderRouterAddress.DoesNotExist only) let
    # propagate as a server error instead of the intended 404.
    except (BorderRouterInterface.DoesNotExist,
            BorderRouterAddress.DoesNotExist):
        logger.error("Router for connection reply with ID %s not found.",
                     con_reply['RequestId'])
        return HttpResponseNotFound("Router for connection reply with ID %s "
                                    "not found." % con_reply['RequestId'])
    isd_id, as_id = TopoID(con_reply['RequestIA'])
    try:
        req_ia = AD.objects.get(isd_id=isd_id, as_id=as_id)
    except AD.DoesNotExist:
        logger.error("AS %s was not found.", con_reply['RequestIA'])
        return HttpResponseNotFound("AS %s was not found"
                                    % con_reply['RequestIA'])
    topo = req_ia.original_topology
    # Patch the remote endpoint of the matching border-router interface.
    interface = topo['BorderRouters'][router.name]['Interfaces'][
        str(router_intf.interface_id)]
    interface['Remote']['Addr'] = con_reply['IP']
    if "UDP" in con_reply['OverlayType']:
        interface['Remote']['L4Port'] = con_reply['Port']
    # TODO(ercanucan): verify the other parameters of the request as well?
    req_ia.save()
    # write the updated topology file
    create_local_gen(con_reply['RequestIA'], topo)
    # save the data into DB
    req_ia.fill_from_topology(topo, clear=True)
    return HttpResponse("Successfully added to topology of %s" % router.name)
def handle_join_reply(request, reply, jr_id):
    """
    Handles the join reply coming through the SCION Coordination Service.
    :param HttpRequest request: Django Http Request passed on via the urls.py
    :param dict reply: Join Reply represented as dictionary.
    :param int jr_id: The ID of the join request.
    """
    join_reply = reply.json()
    if join_reply == {}:
        logger.info("Empty join reply for join request %s.", jr_id)
        return
    if join_reply['Status'] == REQ_APPROVED:
        # Fetch the original join request so its stored keys can be
        # copied onto the newly created AS row.
        jr = JoinRequest.objects.get(id=jr_id)
        new_as = TopoID(join_reply['JoiningIA'])
        master_as_key = base64.b64encode(Random.new().read(16))
        isd, _ = ISD.objects.get_or_create(id=int(new_as[0]))
        ad_fields = dict(
            as_id=new_as[1],
            isd=isd,
            is_core_ad=join_reply['IsCore'],
            is_open=False,
            certificate=join_reply['JoiningIACertificate'],
            trc=join_reply['TRC'],
            sig_pub_key=jr.sig_pub_key,
            sig_priv_key=jr.sig_priv_key,
            enc_pub_key=jr.enc_pub_key,
            enc_priv_key=jr.enc_priv_key,
            master_as_key=master_as_key.decode("utf-8"),
        )
        AD.objects.update_or_create(**ad_fields)
        messages.success(request, 'Created new AS: %s.' % new_as)
    else:
        messages.info(request, 'Your join request with ID %s is declined '
                               'by AS %s.' % (jr_id, join_reply['RespondIA']))
    # update join request's status based on the received join reply
    JoinRequest.objects.filter(id=jr_id).update(status=join_reply['Status'])
def save_all_topologies(request):
    """
    Generate topology files for all ASes or specific ASes in a ISD.
    :param HttpRequest request: Django HTTP request passed on through urls.py
    :returns: Django HTTP Response object.
    :rtype: HttpResponse.
    """
    referer = request.META.get('HTTP_REFERER')
    params = request.POST.copy()
    for isd in params.getlist('ISD'):
        for ad_obj in AD.objects.filter(isd_id=isd):
            isd_as = TopoID.from_values(ad_obj.isd_id, ad_obj.as_id)
            topo_dict = ad_obj.original_topology
            # TODO: in the DB there is at least one entry (ffaa:0:1306) with {}
            if not topo_dict:
                continue
            # write the topology file
            create_local_gen(isd_as, topo_dict)
            machines = list(CloudMachine.objects.filter(ad_id=ad_obj))
            params.setlist('inputCloudAddress', [m.addr for m in machines])
            params.setlist('inputCloudEngine', [m.cloud_provider for m in machines])
            params.setlist('inputHostname', [m.host_name for m in machines])
            # sanitize commit hash from comments, take first part up to |, strip spaces
            commit_hash = ad_obj.commit_hash.split('|')[0].strip()
            generate_ansible_hostfile(params, topo_dict, isd_as, commit_hash)
    return redirect(referer)
def prep_con_req_dict(con_req, isd_id, as_id):
    """
    Prepares the connection request as a dictionary to be sent to the SCION
    coordination service.
    :param ConnectionRequest con_req: Connection request object.
    :returns: Connection request as a dictionary.
    :rtype: dict
    """
    isd_as = TopoID.from_values(isd_id, as_id)
    as_obj = get_object_or_404(AD, isd_id=isd_id, as_id=as_id)
    cert_chain = CertificateChain.from_raw(as_obj.certificate)
    con_req_dict = {
        "RequestId": con_req.id,
        "Info": con_req.info,
        "RequestIA": str(isd_as),
        "RespondIA": con_req.connect_to,
        "IP": con_req.router_public_ip,
        "OverlayType": con_req.overlay_type,
        "MTU": int(con_req.mtu),
        "Bandwidth": int(con_req.bandwidth),
        "Timestamp": iso_timestamp(time.time()),
        "Signature": "",  # TODO(ercanucan): generate and set the signature
        "Certificate": cert_chain.to_json(),
    }
    if con_req.router_public_port:
        con_req_dict["Port"] = int(con_req.router_public_port)
    # Adjust the link type for the receiving party (i.e if the requestIA
    # wants to have the respondIA as a PARENT, then the respondIA should
    # see it as a request to have a CHILD AS.
    mirrored = {LinkType.PARENT: LinkType.CHILD, LinkType.CHILD: LinkType.PARENT}
    con_req_dict["LinkType"] = mirrored.get(con_req.link_type, con_req.link_type)
    return con_req_dict
def _iterate(self, f):
    """Invoke f(topo_id, as_conf) once for every AS entry in the config."""
    for ia_str, conf in self.args.config["ASes"].items():
        f(TopoID(ia_str), conf)
def _self_sign_keys(self):
    """Generate signing and encryption keypairs stored under ISD-AS 0-0."""
    ia = TopoID.from_values(0, 0)
    self.sig_pub_keys[ia], self.sig_priv_keys[ia] = generate_sign_keypair()
    self.enc_pub_keys[ia], self.enc_priv_keys[ia] = generate_enc_keypair()
def _register_sigs(self):
    """Register a SIG address and a tester address for every AS in the
    topology config."""
    for ia_str in self.args.topo_config_dict["ASes"]:
        ia = TopoID(ia_str)
        suffix = ia.file_fmt()
        self._reg_addr(ia, "sig" + suffix)
        self._reg_addr(ia, "tester_" + suffix)
def generate_topology(request):
    """
    Builds a topology dict from the submitted form, writes the gen folder
    and ansible hostfile, and persists the topology on the AS model.

    :param HttpRequest request: Django HTTP request carrying the topology form.
    :returns: redirect back to the referring page, or a JsonResponse error
        when IP:port pairs within the AS are not unique.
    """
    # TODO(ercanucan): This function should be refactored into smaller pieces.
    topology_params = request.POST.copy()
    # remove csrf entry, as we don't need it here
    topology_params.pop('csrfmiddlewaretoken', None)
    topo_dict = {}
    tp = topology_params
    isd_as = TopoID(tp['inputISD_AS'])
    # Fix: direct boolean expression instead of `True if ... else False`.
    topo_dict['Core'] = tp['inputIsCore'] == 'on'
    service_types = ['BeaconService', 'CertificateService', 'PathService']
    for s_type in service_types:
        topo_dict[s_type] = name_entry_dict(
            tp.getlist('input{}Name'.format(s_type)),
            tp.getlist('input{}Address'.format(s_type)),
            tp.getlist('input{}Port'.format(s_type)),
            tp.getlist('input{}InternalAddress'.format(s_type)),
            tp.getlist('input{}InternalPort'.format(s_type)),
        )
    topo_dict['BorderRouters'] = name_entry_dict_router(tp)
    topo_dict['ISD_AS'] = tp['inputISD_AS']
    topo_dict['MTU'] = st_int(tp['inputMTU'], DEFAULT_MTU)
    # TODO(jonghoonkwon): We currently assume that the overlay network is 'UDP/IPv4'
    topo_dict['Overlay'] = 'UDP/IPv4'
    # Zookeeper special case
    s_type = 'ZookeeperServer'
    zk_dict = name_entry_dict_zk(
        tp.getlist('input{}Name'.format(s_type)),
        tp.getlist('input{}Address'.format(s_type)),
        tp.getlist('input{}Port'.format(s_type)),
        tp.getlist('input{}InternalAddress'.format(s_type)),
        tp.getlist('input{}InternalPort'.format(s_type)),
    )
    # dict keys get replaced with numeric keys, 1 based
    # Fix: enumerate replaces the manual int_key counter.
    for int_key, name_key in enumerate(list(zk_dict.keys()), start=1):
        zk_dict[int_key] = zk_dict.pop(name_key)
    topo_dict['ZookeeperService'] = zk_dict
    # IP:port uniqueness in AS check
    all_ip_port_pairs = get_all_ip_port_pairs(topo_dict, service_types)
    if len(all_ip_port_pairs) != len(set(all_ip_port_pairs)):
        return JsonResponse(
            {'data': 'IP:port combinations not unique within AS'})
    create_local_gen(isd_as, topo_dict)
    # sanitize commit hash from comments, take first part up to |, strip spaces
    commit_hash = tp['commitHash'].split('|')[0].strip()
    generate_ansible_hostfile(topology_params, topo_dict, isd_as, commit_hash)
    curr_as = get_object_or_404(AD, as_id=isd_as[1], isd=isd_as[0])
    # load as usual model (for persistance and display in overview)
    # TODO : hash displayed queryset and curr_as query set and compare
    # allow the user to write back the new configuration only if it hasn't
    # changed in the meantime
    curr_as.fill_from_topology(topo_dict, clear=True)
    curr_as.fill_cloud_info(topology_params)
    current_page = request.META.get('HTTP_REFERER')
    return redirect(current_page)