def load(path):
    """
    Load the SONATA service package contents found at the given path.

    :param path: path to a folder with service package contents.
    :return: SonataService object.
    """
    # the manifest references every other artifact of the package
    manifest = read_yaml(os.path.join(path, "META-INF/MANIFEST.MF"))
    # network service descriptor pointed to by the manifest
    nsd_location = os.path.join(
        path, relative_path(manifest.get("entry_service_template")))
    nsd = read_yaml(nsd_location)
    # collect every function descriptor announced in the package content
    vnfd_list = [
        read_yaml(os.path.join(path, relative_path(entry.get("name"))))
        for entry in manifest.get("package_content")
        if "function_descriptor" in entry.get("content-type")
    ]
    # remember where on disk this package was loaded from
    metadata = {"project_disk_path": path}
    s = SonataService(manifest, nsd, vnfd_list, metadata)
    LOG.info(
        "Loaded SONATA service package contents: {} ({} VNFDs).".format(
            s, len(vnfd_list)))
    return s
def load(pkg_path):
    """
    Load the service package contents from the given folder.

    :param pkg_path: path to a folder with service package contents.
    :return: SonataServicePackage object.
    """
    # read the manifest first; it drives the rest of the loading
    manifest = read_yaml(os.path.join(pkg_path, "META-INF/MANIFEST.MF"))
    # resolve and read the network service descriptor
    nsd = read_yaml(
        os.path.join(
            pkg_path,
            relative_path(manifest.get("entry_service_template"))))
    # gather all function descriptors listed in the package content
    vnfd_list = []
    for entry in manifest.get("package_content"):
        if "function_descriptor" not in entry.get("content-type"):
            continue
        vnfd_list.append(
            read_yaml(
                os.path.join(pkg_path, relative_path(entry.get("name")))))
    LOG.info("Loaded SONATA service package contents (%d VNFDs)." % len(vnfd_list))
    # create SonataServicePackage object
    return SonataServicePackage(pkg_path, manifest, nsd, vnfd_list)
def load(path):
    """
    Read the SONATA service package stored at the given path.

    :param path: path to a folder with service package contents.
    :return: SonataService object.
    """
    # package manifest
    manifest = read_yaml(os.path.join(path, "META-INF/MANIFEST.MF"))
    # network service descriptor referenced by the manifest entry
    nsd = read_yaml(os.path.join(
        path, relative_path(manifest.get("entry_service_template"))))
    # function descriptors announced in the package content section
    vnfd_list = list()
    for item in manifest.get("package_content"):
        if "function_descriptor" not in item.get("content-type"):
            continue
        vnfd_list.append(read_yaml(
            os.path.join(path, relative_path(item.get("name")))))
    # keep track of the on-disk location of the package
    metadata = dict()
    metadata["project_disk_path"] = path
    s = SonataService(manifest, nsd, vnfd_list, metadata)
    LOG.info(
        "Loaded SONATA service package contents: {} ({} VNFDs).".format(
            s, len(vnfd_list)))
    return s
def load(pkg_path):
    """
    Load the contents of a SONATA service package folder.

    :param pkg_path: path to a folder with service package contents.
    :return: SonataServicePackage object.
    """
    manifest = read_yaml(os.path.join(pkg_path, "META-INF/MANIFEST.MF"))
    # NSD location comes from the manifest's entry template field
    entry = relative_path(manifest.get("entry_service_template"))
    nsd = read_yaml(os.path.join(pkg_path, entry))
    # read every declared function descriptor
    vnfd_list = [
        read_yaml(os.path.join(pkg_path, relative_path(c.get("name"))))
        for c in manifest.get("package_content")
        if "function_descriptor" in c.get("content-type")
    ]
    LOG.info("Loaded SONATA service package contents (%d VNFDs)." % len(vnfd_list))
    # create SonataServicePackage object
    return SonataServicePackage(pkg_path, manifest, nsd, vnfd_list)
def _config_prometheus(remove=False):
    """
    Add (or remove) this server's 'SP_stream' scrape job in the Prometheus
    configuration file and trigger a live config reload.

    :param remove: when True, remove the 'SP_stream' job instead of adding it.
    :return: error string when the Prometheus container is not running,
        otherwise None.
    """
    global prometheus_server_api
    global prometheus_config_path
    docker_cli = docker.from_env()
    # check if the Prometheus container is already running
    c1 = docker_cli.containers.list(
        filters={'status': 'running', 'name': 'prometheus'})
    if len(c1) < 1:
        LOG.info('Prometheus is not running')
        return "Prometheus DB is not running"
    # make Prometheus scrape this server
    config_file = read_yaml(prometheus_config_path)
    targets = config_file.get('scrape_configs', [])
    # existing 'SP_stream' job entry, if any
    SP_stream_config = next(
        (target for target in targets
         if target.get('job_name') == 'SP_stream'), None)
    # job definition for this server's metrics stream
    config_dict = {'job_name': 'SP_stream',
                   'scrape_interval': '1s',
                   'static_configs': [
                       {'targets': ['172.17.0.1:{}'.format(
                           prometheus_stream_port)]}]}
    if not SP_stream_config and not remove:
        config_file['scrape_configs'].append(config_dict)
        LOG.info('added SP stream to Prometheus')
    elif remove and SP_stream_config:
        # BUGFIX: remove the entry actually stored in the config file.
        # Removing the freshly built config_dict raised ValueError whenever
        # the stored job differed from it (e.g. written with another port).
        config_file['scrape_configs'].remove(SP_stream_config)
        LOG.info('removed SP stream from Prometheus')
    write_yaml(prometheus_config_path, config_file)
    # ask Prometheus to reload its configuration
    post(prometheus_server_api + '/-/reload')
def _embed_function_into_experiment_nsd(
        self, service, ec,
        template="template/sonata_nsd_function_experiment.yml"):
    """
    Generates a NSD that contains the single VNF of the given function
    experiment and embeds the specified function into it.
    The new NSD overwrites the existing NSD in service. This unifies
    the follow up procedures for measurement point inclusion etc.
    The NSD template for this can be found in the template/ folder.

    :param service: service whose nsd/vnfd_list are rewritten in place
    :param ec: experiment configuration referencing the function to embed
    :param template: NSD template path, relative to this module
    """
    # resolve the template relative to this module's location
    template_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), template)
    new_nsd = read_yaml(template_path)
    # 1. update VNF section
    old_vnf_dict = None
    for vnf in service.nsd.get("network_functions"):
        # substring match: the NSD's vnf_name must occur inside the
        # experiment's function identifier
        if str(vnf.get("vnf_name")) in ec.experiment.function:
            old_vnf_dict = vnf
    if old_vnf_dict is None:
        # NOTE(review): execution continues after this error and the
        # update(None) call below raises TypeError — confirm whether an
        # early return/raise is intended here
        LOG.error("Couldn't find function '{}' in service '{}'".format(
            ec.experiment.function, service
        ))
    # copy the matched VNF entry over the template's placeholder entry
    new_vnf_dict = new_nsd.get("network_functions")[0]
    new_vnf_dict.update(old_vnf_dict)
    LOG.debug("Updated VNF section in '{}': {}".format(service, new_vnf_dict))
    # 1.5 remove obsolete VNFDs: keep only the embedded function's VNFD
    old_list = service.vnfd_list.copy()
    service.vnfd_list = list()
    for vnfd in old_list:
        if vnfd.get("name") == new_vnf_dict.get("vnf_name"):
            service.vnfd_list.append(vnfd)
    LOG.debug("Updated VNFD list in '{}': {}".format(service, service.vnfd_list))
    # 2. update virtual link section (get first three CPs from VNFD)
    # TODO remove order assumptions (current version is more a HACK!)
    vnfd = service.get_vnfd_by_uid(ec.experiment.function)
    new_link_list = new_nsd.get("virtual_links")
    cp_ids = [cp.get("id") for cp in vnfd.get("connection_points")]
    # rewrite the template's "test_vnf" placeholder references, mapping
    # link i to connection point i (order-based, see TODO above)
    for i in range(0, min(len(new_link_list), len(cp_ids))):
        cpr = new_link_list[i]["connection_points_reference"]
        for j in range(0, len(cpr)):
            if "test_vnf" in cpr[j]:
                cpr[j] = "{}:{}".format(new_vnf_dict.get("vnf_id"), cp_ids[i])
    LOG.debug("Updated VLink section in '{}': {}".format(service, new_link_list))
    # 3. update forwarding path section
    # TODO remove order assumptions (current version is more a HACK!)
    for fg in new_nsd.get("forwarding_graphs"):
        # the single constituent VNF is the embedded function
        fg.get("constituent_vnfs")[0] = new_vnf_dict.get("vnf_id")
        for nfp in fg.get("network_forwarding_paths"):
            nfp_cp_list = nfp.get("connection_points")
            # starts at index 1 — presumably index 0 belongs to the
            # template's ingress side; TODO confirm
            for i in range(1, min(len(nfp_cp_list), len(cp_ids))):
                if "test_vnf" in nfp_cp_list[i].get("connection_point_ref"):
                    nfp_cp_list[i]["connection_point_ref"] = "{}:{}".format(new_vnf_dict.get("vnf_id"), cp_ids[i])
    LOG.debug("Updated forwarding graph section in '{}': {}".format(service, new_nsd.get("forwarding_graphs")))
    # 4. replace NSD
    service.nsd = new_nsd
def _active_execution(self):
    """
    Generate one service configuration per experiment (unless generation is
    disabled) and execute the generated packages on the configured target
    platforms (unless execution is disabled).
    """
    # generate service configuration using the specified generator module
    if not self.args.no_generation:
        # select and instantiate configuration generator
        cgen = None
        if self.args.service_generator == "sonata":
            from son.profile.generator.sonata import SonataServiceConfigurationGenerator
            cgen = SonataServiceConfigurationGenerator(self.args)
        else:
            LOG.error(
                "Unknown service configuration generator specified: {0}".format(
                    self.args.service_generator))
            exit(1)
        if cgen is None:
            LOG.error("Service configuration generator instantiation failed.")
            exit(1)
        # generate one service configuration for each experiment based
        # on the service referenced in the PED file.
        gen_conf_list = cgen.generate(
            os.path.join(
                # ensure that the reference is an absolute path
                os.path.dirname(
                    self.ped.get("ped_path", "/")),
                self.ped.get("service_package")),
            self.function_experiments,
            self.service_experiments,
            self.work_dir)
        LOG.debug("Generation result: {}".format(gen_conf_list))
        # display generator statistics
        if not self.args.no_display:
            cgen.print_generation_and_packaging_statistics()
    # gen_conf_list holds the generation data needed for the execution phase.
    # NOTE(review): if generation is skipped but execution is enabled,
    # gen_conf_list is undefined below (NameError) — confirm intended usage
    # Execute the generated packages
    if not self.args.no_execution:
        if not gen_conf_list:
            LOG.error("No generated packages, stopping execution")
            raise Exception("Cannot execute experiments: No generated packages")
        # get config file and read remote hosts description
        config_loc = self.args.config
        if not os.path.isabs(config_loc):
            # resolve the config path relative to this module's directory
            config_loc = os.path.join(
                os.path.dirname(os.path.abspath(__file__)), config_loc)
        remote_hosts = read_yaml(config_loc).get("target_platforms")
        # start the experiment series
        profiler = Active_Emu_Profiler(remote_hosts)
        profiler.do_experiment_series(gen_conf_list)
def display_graph(self, file=None):
    """
    Plot each configured profile graph: medians with asymmetric
    confidence-interval error bars plus a scatter of all raw measurements.

    :param file: optional path to a results YAML file; when given, it is
        loaded and replaces self.results before plotting.
    """
    if file:
        self.results = read_yaml(file)
    plt.close("all")
    plt.ioff()
    logging.info("profile graphs:{}".format(self.profile_graphs))
    n = len(self.profile_graphs)
    plt.subplots(nrows=n, ncols=1)
    i = 1
    for profile in self.profile_graphs:
        plt.subplot(n, 1, i)
        x_metric_id = profile['input_metric']
        y_metric_id = profile['output_metric']
        # x axis: medians, CI offsets and raw measurement values
        x_metrics = self._find_metrics(x_metric_id, self.results)
        x_values = [m.median for m in x_metrics]
        x_err_high = [m.CI['max'] - m.median for m in x_metrics]
        x_err_low = [abs(m.CI['min'] - m.median) for m in x_metrics]
        x_unit = x_metrics[0].unit
        x_list = [m.list_values for m in x_metrics]
        x_scatter = [value for sublist in x_list for value in sublist]
        # y axis: same aggregation for the output metric
        y_metrics = self._find_metrics(y_metric_id, self.results)
        y_values = [m.median for m in y_metrics]
        y_err_high = [m.CI['max'] - m.median for m in y_metrics]
        y_err_low = [abs(m.CI['min'] - m.median) for m in y_metrics]
        y_unit = y_metrics[0].unit
        y_list = [m.list_values for m in y_metrics]
        y_scatter = [value for sublist in y_list for value in sublist]
        # label axes once (the original issued each call twice redundantly)
        plt.xlabel('{0}({1})'.format(x_metric_id, x_unit))
        plt.ylabel('{0}({1})'.format(y_metric_id, y_unit))
        plt.title(profile['name'])
        logging.info("plot: x={} - y={}".format(x_metric_id, y_metric_id))
        plt.grid(b=True, which='both', color='lightgrey', linestyle='--')
        plt.errorbar(x_values, y_values,
                     xerr=[x_err_low, x_err_high],
                     yerr=[y_err_low, y_err_high],
                     fmt='--o', capsize=2, color='red')
        plt.scatter(x_scatter, y_scatter)
        i += 1
    plt.tight_layout()
    plt.show()
def measurement_point_to_vnfd(mp, ec, template="template/sonata_vnfd_mp.yml"):
    """
    Build a VNFD dict for a measurement point described in a PED file,
    starting from the given VNFD template.
    """
    # resolve template relative to this module
    base_dir = os.path.dirname(os.path.abspath(__file__))
    vnfd = read_yaml(os.path.join(base_dir, template))
    # fill template placeholders (this highly depends on used template!)
    mp_name = mp.get("name")
    vnfd["name"] = mp_name
    # container image may differ per experiment (parameter study)
    image_key = "measurement_point:{}:container".format(mp_name)
    vnfd["virtual_deployment_units"][0]["vm_image"] = ec.parameter.get(image_key)
    return vnfd
def measurement_point_to_vnfd(mp, ec, template="template/sonata_vnfd_mp.yml"):
    """
    Create a VNFD data structure for one measurement point of a PED file,
    based on the given template.
    """
    vnfd = read_yaml(
        os.path.join(os.path.dirname(os.path.abspath(__file__)), template))
    # overwrite template placeholders (template-specific!)
    vnfd["name"] = mp.get("name")
    # the container image is a study parameter and may vary per experiment
    vnfd["virtual_deployment_units"][0]["vm_image"] = ec.parameter.get(
        "measurement_point:{}:container".format(mp.get("name")))
    return vnfd
def display_graph(self, file=None):
    """
    Plot each configured profile graph: medians with asymmetric
    confidence-interval error bars plus a scatter of all raw measurements.

    :param file: optional path to a results YAML file; when given, it is
        loaded and replaces self.results before plotting.
    """
    if file:
        self.results = read_yaml(file)
    plt.close("all")
    plt.ioff()
    logging.info("profile graphs:{}".format(self.profile_graphs))
    n = len(self.profile_graphs)
    plt.subplots(nrows=n, ncols=1)
    i = 1
    for profile in self.profile_graphs:
        plt.subplot(n, 1, i)
        x_metric_id = profile['input_metric']
        y_metric_id = profile['output_metric']
        # x axis: medians, CI offsets and raw measurement values
        x_metrics = self._find_metrics(x_metric_id, self.results)
        x_values = [m.median for m in x_metrics]
        x_err_high = [m.CI['max'] - m.median for m in x_metrics]
        x_err_low = [abs(m.CI['min'] - m.median) for m in x_metrics]
        x_unit = x_metrics[0].unit
        x_list = [m.list_values for m in x_metrics]
        x_scatter = [value for sublist in x_list for value in sublist]
        # y axis: same aggregation for the output metric
        y_metrics = self._find_metrics(y_metric_id, self.results)
        y_values = [m.median for m in y_metrics]
        y_err_high = [m.CI['max'] - m.median for m in y_metrics]
        y_err_low = [abs(m.CI['min'] - m.median) for m in y_metrics]
        y_unit = y_metrics[0].unit
        y_list = [m.list_values for m in y_metrics]
        y_scatter = [value for sublist in y_list for value in sublist]
        # label axes once (the original issued each call twice redundantly)
        plt.xlabel('{0}({1})'.format(x_metric_id, x_unit))
        plt.ylabel('{0}({1})'.format(y_metric_id, y_unit))
        plt.title(profile['name'])
        logging.info("plot: x={} - y={}".format(x_metric_id, y_metric_id))
        plt.grid(b=True, which='both', color='lightgrey', linestyle='--')
        plt.errorbar(x_values, y_values,
                     xerr=[x_err_low, x_err_high],
                     yerr=[y_err_low, y_err_high],
                     fmt='--o', capsize=2, color='red')
        plt.scatter(x_scatter, y_scatter)
        i += 1
    plt.tight_layout()
    plt.show()
def _load_ped_file(ped_path):
    """
    Loads the specified PED file.

    :param ped_path: path to file
    :return: dictionary (the process exits with status 1 on load failure)
    """
    yml = None
    try:
        yml = read_yaml(ped_path)
        if yml is None:
            # raise a narrow, descriptive error instead of BaseException
            # (also fixes the "YMAL" typo in the message)
            raise ValueError("PED file YAML error.")
    except Exception:
        # was a bare `except:` that also swallowed SystemExit/KeyboardInterrupt
        LOG.error("Couldn't load PED file %r. Abort." % ped_path)
        exit(1)
    # add path annotation to ped file (simpler handling of referenced artifacts)
    yml["ped_path"] = ped_path
    LOG.info("Loaded PED file %r." % ped_path)
    return yml
def display_graph(self, file=None):
    """
    Plot every configured profile graph as an errorbar chart of averages
    with confidence-interval offsets.

    :param file: optional results YAML file to load into self.results first.
    """
    if file:
        self.results = read_yaml(file)
    plt.close("all")
    plt.ioff()
    logging.info("profile graphs:{}".format(self.profile_graphs))
    num_graphs = len(self.profile_graphs)
    plt.subplots(nrows=num_graphs, ncols=1)
    for idx, profile in enumerate(self.profile_graphs, start=1):
        plt.subplot(num_graphs, 1, idx)
        x_metric_id = profile['input_metric']
        y_metric_id = profile['output_metric']
        # averages and asymmetric CI offsets for the input metric
        x_metrics = self._find_metrics(x_metric_id, self.results)
        x_values = [m['average'] for m in x_metrics]
        x_err_high = [m['CI_high'] - m['average'] for m in x_metrics]
        x_err_low = [abs(m['CI_low'] - m['average']) for m in x_metrics]
        # same aggregation for the output metric
        y_metrics = self._find_metrics(y_metric_id, self.results)
        y_values = [m['average'] for m in y_metrics]
        y_err_high = [m['CI_high'] - m['average'] for m in y_metrics]
        y_err_low = [abs(m['CI_low'] - m['average']) for m in y_metrics]
        plt.xlabel('{0}({1})'.format(x_metric_id, x_metrics[0]['unit']))
        plt.ylabel('{0}({1})'.format(y_metric_id, y_metrics[0]['unit']))
        plt.title(profile['name'])
        plt.grid(b=True, which='both', color='lightgrey', linestyle='--')
        plt.errorbar(x_values, y_values,
                     xerr=[x_err_low, x_err_high],
                     yerr=[y_err_low, y_err_high],
                     fmt='--o', capsize=2)
    plt.tight_layout()
    plt.show()
def display_graph(self, file=None):
    """
    Render one errorbar subplot per configured profile graph.

    :param file: optional results YAML file; overrides self.results if set.
    """
    if file:
        self.results = read_yaml(file)
    plt.close("all")
    plt.ioff()
    logging.info("profile graphs:{}".format(self.profile_graphs))

    def _aggregate(metrics):
        # averages plus lower/upper CI offsets, shaped for plt.errorbar()
        values = [m['average'] for m in metrics]
        err_high = [m['CI_high'] - m['average'] for m in metrics]
        err_low = [abs(m['CI_low'] - m['average']) for m in metrics]
        return values, err_low, err_high

    n = len(self.profile_graphs)
    plt.subplots(nrows=n, ncols=1)
    for i, profile in enumerate(self.profile_graphs, 1):
        plt.subplot(n, 1, i)
        x_id = profile['input_metric']
        y_id = profile['output_metric']
        x_metrics = self._find_metrics(x_id, self.results)
        y_metrics = self._find_metrics(y_id, self.results)
        x_values, x_err_low, x_err_high = _aggregate(x_metrics)
        y_values, y_err_low, y_err_high = _aggregate(y_metrics)
        plt.xlabel('{0}({1})'.format(x_id, x_metrics[0]['unit']))
        plt.ylabel('{0}({1})'.format(y_id, y_metrics[0]['unit']))
        plt.title(profile['name'])
        plt.grid(b=True, which='both', color='lightgrey', linestyle='--')
        plt.errorbar(x_values, y_values, xerr=[x_err_low, x_err_high],
                     yerr=[y_err_low, y_err_high], fmt='--o', capsize=2)
    plt.tight_layout()
    plt.show()
# translate metric names to the prometheus query class MetricTemplate(object): def __init__(self, **definition): self.metric_name = None self.query_template = None self.unit = None self.desc = None # populate object from definition dict (eg. from YAML) self.__dict__.update(definition) # import all prometheus metrics from yml file src_path = os.path.join('prometheus', 'prometheus_queries.yml') srcfile = pkg_resources.resource_filename(__name__, src_path) prometheus_metrics = read_yaml(srcfile) # All metric types in the prometheus config file nsdlink_metrics = ['packet_rate', 'byte_rate', 'packet_count', 'byte_count', 'packet_rate_cadv', 'byte_rate_cadv', 'packet_count_cadv', 'byte_count_cadv'] network_metrics = ['packet_rate', 'byte_rate', 'packet_count', 'byte_count', 'packet_rate_cadv', 'byte_rate_cadv', 'packet_count_cadv', 'byte_count_cadv'] testvnf_metrics = ['packet_loss', 'jitter', 'throughput'] compute_metrics = ['cpu', 'mem', 'host_cpu'] metric2flowquery = {} for metric in prometheus_metrics['flowquery']: metric2flowquery[metric['metric_name']] = MetricTemplate(**metric) compute2vnfquery = {} for metric in prometheus_metrics['computequery']:
class MetricTemplate(object):
    """One Prometheus metric/query template, populated from a YAML entry."""
    def __init__(self, **definition):
        # defaults; overwritten by whatever keys the definition provides
        self.metric_name = None
        self.query_template = None
        self.unit = None
        self.desc = None
        # populate object from definition dict (eg. from YAML)
        self.__dict__.update(definition)

# import all prometheus metrics from yml file (packaged with this module)
src_path = os.path.join('prometheus', 'prometheus_queries.yml')
srcfile = pkg_resources.resource_filename(__name__, src_path)
prometheus_metrics = read_yaml(srcfile)

# lookup tables: metric name -> MetricTemplate, one per query category
metric2flowquery = {}
for metric in prometheus_metrics['flowquery']:
    metric2flowquery[metric['metric_name']] = MetricTemplate(**metric)

compute2vnfquery = {}
for metric in prometheus_metrics['computequery']:
    compute2vnfquery[metric['metric_name']] = MetricTemplate(**metric)

network2vnfquery = {}
for metric in prometheus_metrics['networkquery']:
    network2vnfquery[metric['metric_name']] = MetricTemplate(**metric)

test2vnfquery = {}
# NOTE(review): this chunk appears truncated — the body of the following
# loop is missing from the visible source
for metric in prometheus_metrics['testvnfquery']:
def _embed_function_into_experiment_nsd(
        self, service, ec,
        template="template/sonata_nsd_function_experiment.yml"):
    """
    Generates a NSD that contains the single VNF of the given function
    experiment and embeds the specified function into it.
    The new NSD overwrites the existing NSD in service. This unifies
    the follow up procedures for measurement point inclusion etc.
    The NSD template for this can be found in the template/ folder.

    :param service: service whose nsd/vnfd_list are rewritten in place
    :param ec: experiment configuration referencing the function to embed
    :param template: NSD template path, relative to this module
    :raises ValueError: when the referenced function is not part of service
    """
    # resolve the template relative to this module's location
    template_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), template)
    new_nsd = read_yaml(template_path)
    # 1. update VNF section
    old_vnf_dict = None
    for vnf in service.nsd.get("network_functions"):
        # substring match: the NSD's vnf_name must occur inside the
        # experiment's function identifier
        if str(vnf.get("vnf_name")) in ec.experiment.function:
            old_vnf_dict = vnf
    if old_vnf_dict is None:
        # BUGFIX: fail fast with a clear error. Previously execution
        # continued and crashed with an obscure TypeError in update(None).
        LOG.error("Couldn't find function '{}' in service '{}'".format(
            ec.experiment.function, service))
        raise ValueError(
            "Couldn't find function '{}' in service '{}'".format(
                ec.experiment.function, service))
    # copy the matched VNF entry over the template's placeholder entry
    new_vnf_dict = new_nsd.get("network_functions")[0]
    new_vnf_dict.update(old_vnf_dict)
    LOG.debug("Updated VNF section in '{}': {}".format(
        service, new_vnf_dict))
    # 1.5 remove obsolete VNFDs: keep only the embedded function's VNFD
    old_list = service.vnfd_list.copy()
    service.vnfd_list = list()
    for vnfd in old_list:
        if vnfd.get("name") == new_vnf_dict.get("vnf_name"):
            service.vnfd_list.append(vnfd)
    LOG.debug("Updated VNFD list in '{}': {}".format(
        service, service.vnfd_list))
    # 2. update virtual link section (get first three CPs from VNFD)
    # TODO remove order assumptions (current version is more a HACK!)
    vnfd = service.get_vnfd_by_uid(ec.experiment.function)
    new_link_list = new_nsd.get("virtual_links")
    cp_ids = [cp.get("id") for cp in vnfd.get("connection_points")]
    # rewrite the template's "test_vnf" placeholder references, mapping
    # link i to connection point i (order-based, see TODO above)
    for i in range(0, min(len(new_link_list), len(cp_ids))):
        cpr = new_link_list[i]["connection_points_reference"]
        for j in range(0, len(cpr)):
            if "test_vnf" in cpr[j]:
                cpr[j] = "{}:{}".format(new_vnf_dict.get("vnf_id"),
                                        cp_ids[i])
    LOG.debug("Updated VLink section in '{}': {}".format(
        service, new_link_list))
    # 3. update forwarding path section
    # TODO remove order assumptions (current version is more a HACK!)
    for fg in new_nsd.get("forwarding_graphs"):
        # the single constituent VNF is the embedded function
        fg.get("constituent_vnfs")[0] = new_vnf_dict.get("vnf_id")
        for nfp in fg.get("network_forwarding_paths"):
            nfp_cp_list = nfp.get("connection_points")
            for i in range(1, min(len(nfp_cp_list), len(cp_ids))):
                if "test_vnf" in nfp_cp_list[i].get(
                        "connection_point_ref"):
                    nfp_cp_list[i][
                        "connection_point_ref"] = "{}:{}".format(
                            new_vnf_dict.get("vnf_id"), cp_ids[i])
    LOG.debug("Updated forwarding graph section in '{}': {}".format(
        service, new_nsd.get("forwarding_graphs")))
    # 4. replace NSD
    service.nsd = new_nsd