def __generate_release_notes(self):
    log = self.__get_commit_log()
    self.release_notes.generate(self.version_num, log)
    util.write_file(self.release_notes.absolute_filepath, str(self.release_notes))
    self.git.add(self.release_notes.absolute_filepath, self.props.wiki_path)
def create_new_ad_files(parent_ad_topo, isd_id, ad_id, out_dir):
    assert isinstance(parent_ad_topo, dict), 'Invalid topology dict'
    isd_ad_id = '{}-{}'.format(isd_id, ad_id)
    ad_dict = {
        "default_zookeepers": {"1": {"manage": False, "addr": "localhost"}},
        isd_ad_id: {'level': 'LEAF'},
    }
    gen = ConfigGenerator(out_dir=out_dir)
    path_policy_file = DEFAULT_PATH_POLICY_FILE
    zk_config = DEFAULT_ZK_CONFIG
    # Write basic config files for the new AD
    with tempfile.NamedTemporaryFile('w') as temp_fh:
        json.dump(ad_dict, temp_fh)
        temp_fh.flush()
        gen.generate_all(temp_fh.name, path_policy_file, zk_config)
    # Copy TRC file
    trc_path = get_some_trc_path(isd_id)
    if trc_path:
        dst_path = get_trc_file_path(isd_id, ad_id, isd_id, 0,
                                     isd_dir=out_dir)
        shutil.copyfile(trc_path, dst_path)
    new_topo_path = gen.path_dict(isd_id, ad_id)['topo_file_abs']
    new_topo_file = read_file(new_topo_path)
    new_topo = json.loads(new_topo_file)
    existing_topo, new_topo = link_topologies(parent_ad_topo, new_topo,
                                              'PARENT_CHILD')
    # Update the config files for the new AD
    write_file(new_topo_path, json.dumps(new_topo, sort_keys=True, indent=4))
    gen.write_derivatives(new_topo)
    return new_topo, existing_topo
def _write_prometheus_config_file(path, job_dict):
    """
    Writes a Prometheus configuration file for the border routers into the
    given path.

    :param str path: The path to write the configuration file into.
    :param dict job_dict: A dictionary mapping from job name to a list of
        file paths to be provided to the file_sd_configs field of the
        configuration file.
    """
    scrape_configs = []
    for job_name, file_paths in job_dict.items():
        scrape_configs.append({
            'job_name': job_name,
            'file_sd_configs': [{'files': file_paths}],
        })
    config = {
        'global': {
            'scrape_interval': '5s',
            'evaluation_interval': '5s',
            'external_labels': {
                'monitor': 'scion-monitor'
            }
        },
        'scrape_configs': scrape_configs,
    }
    write_file(os.path.join(path, PROM_FILE),
               yaml.dump(config, default_flow_style=False))
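A minimal usage sketch (the job names and target-file paths below are hypothetical): each entry of job_dict becomes one scrape_configs job whose file_sd_configs points at the listed target files.

_write_prometheus_config_file('gen', {
    'BorderRouters': ['gen/ISD1/AS11/prometheus/br.yml'],
    'BeaconServers': ['gen/ISD1/AS11/prometheus/bs.yml'],
})
# The resulting gen/<PROM_FILE> looks roughly like:
#   global:
#     evaluation_interval: 5s
#     external_labels:
#       monitor: scion-monitor
#     scrape_interval: 5s
#   scrape_configs:
#   - job_name: BorderRouters
#     file_sd_configs:
#     - files:
#       - gen/ISD1/AS11/prometheus/br.yml
#   ...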
def _write_prometheus_target_file(base_path, target_addrs, ele_type):
    """
    Writes the target file into the given path.

    :param str base_path: The base path of the target file.
    :param list target_addrs: A list of target addresses.
    :param str ele_type: The type of the infrastructure element.
    """
    targets_path = os.path.join(
        base_path, PrometheusGenerator.PROM_DIR,
        PrometheusGenerator.TARGET_FILES[ele_type])
    target_config = [{'targets': target_addrs}]
    write_file(targets_path, yaml.dump(target_config, default_flow_style=False))
def test_write_file_directory_exists(self, mock_exists):
    path = "/some/file/path/"
    filepath = path + "file_name.txt"
    content = "some content"
    mock_exists.return_value = True
    mock_open = mock.mock_open()
    with mock.patch('__builtin__.open', mock_open, create=True):
        util.write_file(filepath, content)
    mock_exists.assert_called_once_with(path)
    mock_open.assert_called_once_with(filepath, 'w')
def parse_gen_folder(gen_folder, output_path):
    """
    Parses a gen folder into a nested dict.

    Example: a gen folder containing ISD1/AS2/, ISD1/AS1/, ISD2/AS1/, ISD2/AS3/
    becomes the dictionary
    {'ISD': {1: {'AS': {2: {..}, 1: {..}}}, 2: {'AS': {1: {..}, 3: {..}}}}}

    :param str gen_folder: filepath to the gen folder
    :param str output_path: directory the resulting output.json is written to
    :return: Nested dictionary with all the information to draw the graph
    """
    gen_dict = {}
    gen_dict['ISD'] = {}
    inside_gen_folder = os.listdir(gen_folder)
    for directory in inside_gen_folder:
        if directory[:3] == 'ISD':
            isd_number = directory[3:]
            isd_dict = parse_isd_folder(
                isd_number, gen_folder + "/" + directory)
            gen_dict['ISD'][isd_number] = isd_dict
    write_file(os.path.join(output_path, 'output.json'),
               json.dumps(gen_dict, sort_keys=True, indent=4))
    return gen_dict
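A brief usage sketch, assuming a gen/ layout as in the docstring (parse_isd_folder comes from the same module and the paths are hypothetical).

graph_dict = parse_gen_folder('gen', '/tmp/topo-graph')
# graph_dict == {'ISD': {'1': {...}, '2': {...}}} -- the keys are strings,
# taken from the directory names -- and the same dict is pretty-printed to
# /tmp/topo-graph/output.json.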
def main(in_path, out_path):
    cwd = os.getcwd()

    def relpath(path):
        return os.path.relpath(path, cwd)

    with util.TemporaryDirectory() as temp_dir:
        temp_deps_path = os.path.join(temp_dir, 'deps')
        temp_mk_path = os.path.join(temp_dir, 'mk')
        temp_files_path = os.path.join(temp_dir, 'files')

        _, out_ext = os.path.splitext(out_path)
        # OpenSCAD requires the output file name to end in .stl or .dxf.
        temp_out_path = os.path.join(temp_dir, 'out' + out_ext)

        _openscad(in_path, temp_out_path, temp_deps_path)

        mk_content = '%:; echo "$@" >> {}'.format(
            util.bash_escape_string(temp_files_path))

        # Use make to parse the dependency makefile written by OpenSCAD.
        util.write_file(temp_mk_path, mk_content.encode())
        util.command(
            ['make', '-s', '-B', '-f', temp_mk_path, '-f', temp_deps_path],
            remove_env=['MAKELEVEL', 'MAKEFLAGS'])

        # All dependencies as paths relative to the project root.
        deps = set(map(relpath,
                       util.read_file(temp_files_path).decode().splitlines()))

        # Relative paths to all files that should not appear in the
        # dependency makefile.
        ignored_files = set(map(relpath, [in_path, temp_deps_path,
                                          temp_mk_path, temp_out_path]))

        # Write output files.
        make.write_dependencies(out_path + '.d', out_path,
                                deps - ignored_files)
        util.rename_atomic(temp_out_path, out_path)
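An illustration of the make trick above (file names hypothetical): OpenSCAD's deps file is an ordinary makefile rule, and the stub pattern rule makes every file mentioned in it "build" by echoing its own name.

# temp_deps_path, as written by OpenSCAD, looks roughly like:
#
#   out.stl: model.scad lib/threads.scad
#
# temp_mk_path contains only the pattern rule '%:; echo "$@" >> <files>'.
# Running 'make -s -B -f mk -f deps' treats every target and prerequisite as
# out of date and "rebuilds" each one by appending its name to <files>, one
# path per line. The ignored_files set then removes the entries that should
# not appear in the generated .d file (the input itself and the temp files).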
def _write_trust_files(self, topo_dicts, cert_files):
    for topo_id, as_topo, base in _srv_iter(
            topo_dicts, self.out_dir, common=True):
        for path, value in cert_files[topo_id].items():
            write_file(os.path.join(base, path), value)
def generate(self):
    dc_conf = self._generate_dc()
    os.makedirs(os.path.join(self.local_jaeger_dir, 'data'), exist_ok=True)
    os.makedirs(os.path.join(self.local_jaeger_dir, 'key'), exist_ok=True)
    write_file(os.path.join(self.args.output_dir, JAEGER_DC),
               yaml.dump(dc_conf, default_flow_style=False))
def _write_target_file(self, base_path, target_addrs, ele_type):
    targets_path = os.path.join(
        base_path, self.PROM_DIR, self.TARGET_FILES[ele_type])
    target_config = [{'targets': target_addrs}]
    write_file(targets_path, yaml.dump(target_config, default_flow_style=False))
def _write_zlog_cfg(self, name, elem, elem_dir):
    tmpl = Template(read_file("topology/zlog.tmpl"))
    cfg = os.path.join(elem_dir, "%s.zlog.conf" % elem)
    write_file(cfg, tmpl.substitute(name=name, elem=elem))
def create_local_gen(isd_as, tp):
    """
    Creates the usual gen folder structure for an ISD/AS under web_scion/gen,
    ready for Ansible deployment.

    Args:
        isd_as: isd-as string
        tp: the topology parameter file as a dict of dicts
    """
    # looks up the name of the executable for the service,
    # certificate server -> 'cert_server', ...
    lkx = lookup_dict_executables()
    isd_id, as_id = isd_as.split('-')
    local_gen_path = os.path.join(WEB_ROOT, 'gen')
    # Add the dispatcher folder in sub/web/gen/ if not already there
    dispatcher_folder_path = os.path.join(local_gen_path, 'dispatcher')
    if not os.path.exists(dispatcher_folder_path):
        copytree(os.path.join(PROJECT_ROOT, 'deploy-gen', 'dispatcher'),
                 dispatcher_folder_path)
    # TODO: Cert distribution needs integration with scion-coord,
    # using bruteforce copying over some gen certs and
    # matching keys to get Ansible testing
    # before integration with scion-coord
    shared_files_path = os.path.join(local_gen_path, 'shared_files')
    rmtree(os.path.join(shared_files_path), True)  # rm shared_files & content
    # populate the shared_files folder with the relevant files for this AS
    certgen_path = os.path.join(
        PROJECT_ROOT, 'deploy-gen/ISD{}/AS{}/endhost/'.format(isd_id, as_id))
    copytree(certgen_path, shared_files_path)
    # remove files that are not shared
    try:
        os.remove(os.path.join(shared_files_path, 'supervisord.conf'))
    except OSError:
        pass
    try:
        os.remove(os.path.join(shared_files_path, 'topology.yml'))
    except OSError:
        pass
    try:
        as_path = 'ISD{}/AS{}/'.format(isd_id, as_id)
        as_path = os.path.join(local_gen_path, as_path)
        rmtree(as_path, True)
    except OSError:
        pass
    types = ['beacon_server', 'certificate_server', 'router', 'path_server',
             'sibra_server', 'zookeeper_service']
    # 'domain_server',  # tmp fix until the discovery service replaces it
    dict_keys = ['BeaconServers', 'CertificateServers', 'BorderRouters',
                 'PathServers', 'SibraServers', 'Zookeepers']
    types_keys = zip(types, dict_keys)
    zk_name_counter = 1
    for service_type, type_key in types_keys:
        executable_name = lkx[service_type]
        replicas = tp[type_key].keys()
        # SECURITY WARNING: allows arbitrary paths.
        # The user can enter arbitrary paths for his output.
        # Mitigation: make path at least relative.
        executable_name = os.path.normpath('/' + executable_name).lstrip('/')
        for serv_name in replicas:
            config = configparser.ConfigParser()
            # replace serv_name if zookeeper special case (they have only ids)
            if service_type == 'zookeeper_service':
                serv_name = '{}{}-{}-{}'.format('zk', isd_id, as_id,
                                                zk_name_counter)
                zk_name_counter += 1
            config['program:' + serv_name] = \
                {'startsecs': '5',
                 'command': '"bin/{0}" "{1}" "gen/ISD{2}/AS{3}/{1}"'.format(
                     executable_name, serv_name, isd_id, as_id),
                 'startretries': '0',
                 'stdout_logfile': 'logs/' + str(serv_name) + '.OUT',
                 'redirect_stderr': 'true',
                 'autorestart': 'false',
                 'environment': 'PYTHONPATH=.',
                 'autostart': 'false',
                 'stdout_logfile_maxbytes': '0'}
            # replace command entry if zookeeper special case
            if service_type == 'zookeeper_service':
                zk_config_path = os.path.join(PROJECT_ROOT, 'topology',
                                              'Zookeeper.yml')
                zk_config = {}
                with open(zk_config_path, 'r') as stream:
                    try:
                        zk_config = yaml.load(stream)
                    except (yaml.YAMLError, KeyError):
                        zk_config = ''  # TODO: give user feedback, add TC
                class_path = zk_config['Environment']['CLASSPATH']
                zoomain_env = zk_config['Environment']['ZOOMAIN']
                command_string = '"java" "-cp" ' \
                                 '"gen/{1}/{2}/{0}:{3}" ' \
                                 '"-Dzookeeper.' \
                                 'log.file=logs/{0}.log" ' \
                                 '"{4}" ' \
                                 '"gen/ISD{1}/AS{2}/{0}/' \
                                 'zoo.cfg"'.format(serv_name, isd_id, as_id,
                                                   class_path, zoomain_env)
                config['program:' + serv_name]['command'] = command_string
            node_path = 'ISD{}/AS{}/{}'.format(isd_id, as_id, serv_name)
            node_path = os.path.join(local_gen_path, node_path)
            # os.makedirs(node_path, exist_ok=True)
            if not os.path.exists(node_path):
                copytree(os.path.join(shared_files_path), node_path)
            conf_file_path = os.path.join(node_path, 'supervisord.conf')
            with open(conf_file_path, 'w') as configfile:
                config.write(configfile)
            # copy AS topology.yml file into node
            one_of_topology_path = os.path.join(node_path, 'topology.yml')
            one_of_topology = particular_topo_instance(tp, type_key)
            with open(one_of_topology_path, 'w') as file:
                yaml.dump(one_of_topology, file, default_flow_style=False)
            # copy(yaml_topo_path, node_path)
            # Do not share the global topology, as each node gets its own
            # topology file.
            # create zlog file
            tmpl = Template(
                read_file(os.path.join(PROJECT_ROOT, "topology/zlog.tmpl")))
            cfg = os.path.join(node_path, "%s.zlog.conf" % serv_name)
            write_file(cfg, tmpl.substitute(name=service_type, elem=serv_name))
    # Generating only the needed intermediate parts is not used for now, as
    # generator.py creates all certs and keys resources.
    # Add endhost folder for all ASes
    node_path = 'ISD{}/AS{}/{}'.format(isd_id, as_id, 'endhost')
    node_path = os.path.join(local_gen_path, node_path)
    if not os.path.exists(node_path):
        copytree(os.path.join(shared_files_path), node_path)
    copy(yaml_topo_path, node_path)
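For orientation, a sketch of the layout this produces for a hypothetical AS 1-5 with a single beacon server instance (the service name bs1-5-1 is illustrative and comes from the keys of the topology dict):

# web_scion/gen/
# ├── dispatcher/                 # copied once from deploy-gen
# ├── shared_files/               # certs/keys shared by this AS's nodes
# └── ISD1/
#     └── AS5/
#         ├── bs1-5-1/
#         │   ├── supervisord.conf    # written by configparser above
#         │   ├── topology.yml        # per-node topology instance
#         │   └── bs1-5-1.zlog.conf   # rendered from topology/zlog.tmpl
#         └── endhost/                # copy of shared_files plus the AS topology file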
def _generate_disp_cfg(self, elem, topo_id):
    elem_dir = os.path.join(topo_id.base_dir(self.args.output_dir), elem)
    cfg = "%s/dispatcher.zlog.conf" % elem_dir
    tmpl = Template(read_file("topology/zlog.tmpl"))
    write_file(cfg, tmpl.substitute(name="dispatcher", elem=elem))
def _write_cust_files(self, topo_dicts, cust_files):
    for topo_id, as_topo in topo_dicts.items():
        base = os.path.join(self.out_dir, topo_id.ISD(), topo_id.AS())
        for elem in as_topo["CertificateService"]:
            for path, value in cust_files[topo_id].items():
                write_file(os.path.join(base, elem, path), value)
def __generate_index(self):
    self.index.generate(self.version_num)
    util.write_file(self.index.absolute_filepath, str(self.index))
    self.git.add(self.index.absolute_filepath, self.props.wiki_path)
def test(self):
    """
    Create a certificate chain and verify it with a TRC file. Sign a message
    with the private key of the last certificate in the chain and verify it.
    """
    cert10 = CertificateChain(get_cert_chain_file_path(1, 10, 1, 10, 0))
    trc = TRC(get_trc_file_path(1, 10, 1, 0))
    print('TRC verification', trc.verify())
    print('Cert Chain verification:', cert10.verify('ISD:1-AD:10', trc, 0))

    sig_priv10 = read_file(get_sig_key_file_path(1, 10))
    sig_priv10 = base64.b64decode(sig_priv10)
    msg = b'abcd'
    sig = sign(msg, sig_priv10)
    print('Sig test:', verify_sig_chain_trc(msg, sig, 'ISD:1-AD:10', cert10,
                                            trc, 0))

    sig_priv13 = read_file(get_sig_key_file_path(1, 13))
    sig_priv13 = base64.b64decode(sig_priv13)
    msg = b'abd'
    sig = sign(msg, sig_priv13)
    CertificateChain.from_values([])
    print('Sig test 2:', verify_sig_chain_trc(msg, sig, 'ISD:1-AD:13', cert10,
                                              trc, 0), '\n')

    topology = Topology.from_file(
        "topology/ISD1/topologies/ISD:1-AD:10.json")
    src_addr = SCIONAddr.from_values(topology.isd_id, topology.ad_id,
                                     IPv4Address("127.0.0.1"))
    dst_addr = topology.certificate_servers[0].addr
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind((str(src_addr.host_addr), SCION_UDP_PORT))

    print("Sending TRC request (ISD:1-V:0) to local CS.")
    msg = TRCRequest.from_values(
        PT.TRC_REQ_LOCAL, src_addr,
        topology.parent_border_routers[0].interface.if_id,
        topology.isd_id, topology.ad_id, 1, 0).pack()
    sock.sendto(msg, (str(dst_addr), SCION_UDP_PORT))

    temp_file = './temp.txt'
    timeout = 5
    ready = select.select([sock], [], [], timeout)
    if not ready[0]:
        print("Error: no TRC reply was received!")
        sock.close()
        return
    data, _ = sock.recvfrom(SCION_BUFLEN)
    print("Received TRC reply from local CS.")
    trc_reply = TRCReply(data)
    write_file(temp_file, trc_reply.trc.decode('utf-8'))
    trc = TRC(temp_file)
    assert trc.verify()

    print("Sending cert chain request (ISD:1-AD:16-V:0) to local CS.")
    msg = CertChainRequest.from_values(
        PT.CERT_CHAIN_REQ_LOCAL, src_addr,
        topology.parent_border_routers[0].interface.if_id,
        topology.isd_id, topology.ad_id, 1, 16, 0).pack()
    sock.sendto(msg, (str(dst_addr), SCION_UDP_PORT))
    ready = select.select([sock], [], [], timeout)
    if not ready[0]:
        print("Error: no cert chain reply was received!")
        sock.close()
        return
    data, _ = sock.recvfrom(SCION_BUFLEN)
    print("Received cert chain reply from local CS.")
    cert_chain_reply = CertChainReply(data)
    write_file(temp_file, cert_chain_reply.cert_chain.decode('utf-8'))
    cert_chain = CertificateChain(temp_file)
    assert cert_chain.verify('ISD:1-AD:16', trc, 0)

    os.remove(temp_file)
    sock.close()
def _write_dependencies(path, target, dependencies):
    util.write_file(
        path, '{}: {}\n'.format(target, ' '.join(dependencies)).encode())
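A short usage sketch (paths hypothetical); the written file holds a single make rule naming the target and its dependencies.

_write_dependencies('out/model.stl.d', 'out/model.stl',
                    ['model.scad', 'lib/threads.scad'])
# out/model.stl.d now contains the single line:
#   out/model.stl: model.scad lib/threads.scad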
def generate_br(self):
    for topo_id, topo in self.args.topo_dicts.items():
        for k, v in topo.get("BorderRouters", {}).items():
            base = topo_id.base_dir(self.args.output_dir)
            br_conf = self._build_br_conf(topo_id, topo["ISD_AS"], base, k, v)
            write_file(os.path.join(base, k, BR_CONFIG_NAME),
                       toml.dumps(br_conf))
def generate_sciond(self):
    for topo_id, topo in self.args.topo_dicts.items():
        base = topo_id.base_dir(self.args.output_dir)
        sciond_conf = self._build_sciond_conf(topo_id, topo["ISD_AS"], base)
        write_file(os.path.join(base, COMMON_DIR, SD_CONFIG_NAME),
                   toml.dumps(sciond_conf))
def _write_as_list(self):
    list_path = os.path.join(self.out_dir, AS_LIST_FILE)
    write_file(list_path, yaml.dump(dict(self.as_list)))
def _write_ifids(self):
    list_path = os.path.join(self.out_dir, IFIDS_FILE)
    write_file(list_path, yaml.dump(self.ifid_map, default_flow_style=False))
def approve_request(ad, ad_request):
    # Create the new AD
    new_id = AD.objects.latest('id').id + 1
    new_ad = AD.objects.create(id=new_id, isd=ad.isd,
                               md_host=ad_request.router_public_ip)
    parent_topo_dict = ad.generate_topology_dict()

    with tempfile.TemporaryDirectory() as temp_dir:
        new_topo_dict, parent_topo_dict = create_new_ad_files(
            parent_topo_dict, new_ad.isd.id, new_ad.id, out_dir=temp_dir)

        # Adjust router ips/ports
        # if ad_request.router_public_ip is None:
        #     ad_request.router_public_ip = ad_request.router_bound_ip
        if ad_request.router_public_port is None:
            ad_request.router_public_port = ad_request.router_bound_port

        _, new_topo_router = find_last_router(new_topo_dict)
        # new_topo_router['Interface']['Addr'] = ad_request.router_bound_ip
        # new_topo_router['Interface']['UdpPort'] = ad_request.router_bound_port

        _, parent_topo_router = find_last_router(parent_topo_dict)
        parent_router_if = parent_topo_router['Interface']
        parent_router_if['ToAddr'] = ad_request.router_public_ip
        parent_router_if['UdpPort'] = ad_request.router_public_port

        new_ad.fill_from_topology(new_topo_dict, clear=True)
        ad.fill_from_topology(parent_topo_dict, clear=True)

        # Update the new topology on disk:
        # Write new config files to disk, regenerate everything else
        # FIXME(rev112): minor duplication, see ad_connect.create_new_ad_files()
        gen = ConfigGenerator(out_dir=temp_dir)
        new_topo_path = gen.path_dict(new_ad.isd.id, new_ad.id)['topo_file_abs']
        write_file(new_topo_path,
                   json.dumps(new_topo_dict, sort_keys=True, indent=4))
        gen.write_derivatives(new_topo_dict)

        # Resulting package will be stored here
        package_dir = os.path.join('gen', 'AD' + str(new_ad))
        # package_dir = os.path.join(PACKAGE_DIR_PATH, 'AD' + str(new_ad))
        # TODO: replace ad_management functionality
        if os.path.exists(package_dir):
            rmtree(package_dir)
        os.makedirs(package_dir)

        # Prepare package
        # package_name = 'scion_package_AD{}-{}'.format(new_ad.isd, new_ad.id)
        # config_dirs = [os.path.join(temp_dir, x) for x in os.listdir(temp_dir)]
        # ad_request.package_path = prepare_package(out_dir=package_dir,
        #                                           config_paths=config_dirs,
        #                                           package_name=package_name)
        ad_request.new_ad = new_ad
        ad_request.status = 'APPROVED'
        ad_request.save()

    # Give permissions to the user
    request_creator = ad_request.created_by
    assign_perm('change_ad', request_creator, new_ad)
    new_ad.save()
    ad.save()
def _write_target_file(self, base_path, router_addrs):
    targets_path = os.path.join(base_path, self.PROM_DIR, self.BR_TARGET_FILE)
    target_config = [{'targets': router_addrs}]
    write_file(targets_path, yaml.dump(target_config, default_flow_style=False))
def _write_overlay(self):
    file_path = os.path.join(self.args.output_dir, OVERLAY_FILE)
    write_file(file_path, self.overlay + '\n')
def _write_hosts(self):
    text = StringIO()
    for intf, domain in self.hosts:
        text.write("%s\tds.%s\n" % (intf.ip, str(domain).rstrip(".")))
    hosts_path = os.path.join(self.out_dir, HOSTS_FILE)
    write_file(hosts_path, text.getvalue())