def __init__(self, workspace=None):
    """
    Initialize the Validator.
    A workspace may be provided for an easy parameter configuration,
    such as location and extension of descriptors, verbosity level, etc.
    :param workspace: SONATA workspace object
    """
    # Use the given workspace, or fall back to a "virtual" one rooted at
    # the current directory (no file structure is actually created).
    self._workspace = workspace if workspace else \
        Workspace('.', log_level='info')

    # By default every validation stage is enabled.
    self._syntax = True
    self._integrity = True
    self._topology = True

    # Pull configuration values from the (possibly virtual) workspace.
    self._dext = self._workspace.default_descriptor_extension
    self._dpath = '.'
    self._log_level = self._workspace.log_level

    # Set up colored logging at the configured verbosity.
    coloredlogs.install(level=self._log_level)

    # Storage for loaded descriptors.
    self._storage = DescriptorStorage()

    # Validator used for the syntax stage.
    self._schema_validator = SchemaValidator(self._workspace)

    # Start with a clean event log.
    evtlog.reset()
def __init__(self, workspace, project=None, services=None, functions=None,
             dst_path=None, generate_pd=True, version="1.0"):
    """
    Initialize the packager with either an SDK project or explicit
    service/function descriptor lists (a "custom" package).
    :param workspace: SONATA workspace object
    :param project: SDK project to package
    :param services: list of service descriptor files (custom package)
    :param functions: list of function descriptor files (custom package)
    :param dst_path: destination directory for the generated package
    :param generate_pd: build the package descriptor immediately
    :param version: package descriptor version string
    """
    coloredlogs.install(level=workspace.log_level)

    # Parameter bookkeeping
    self._version = version
    self._package_descriptor = None
    self._workspace = workspace
    self._project = project
    self._services = services
    self._functions = functions

    # son-access client for pulling resources from Service Platforms
    self._access = AccessClient(self._workspace,
                                log_level=self._workspace.log_level)

    # Descriptor validator, configured for syntax-only checking
    self._validator = Validator(workspace=workspace)
    self._validator.configure(syntax=True, integrity=False, topology=False)

    # Schema validator for descriptor schemas
    self._schema_validator = SchemaValidator(workspace)

    # Registry of VNFs referenced by the NS (id -> packaged flag)
    self._ns_vnf_registry = {}

    # Where the final package is written (default: current directory)
    self._dst_path = dst_path or '.'

    # Unique temporary working directory for package assembly
    self._workdir = '.package-' + str(time.time())

    # THE entry service template of this package
    self._entry_service_template = None

    # Repositories/catalogue servers this package depends on
    # (goes into the Package Resolver Section)
    self._package_resolvers = []

    # External artifacts referenced but not shipped
    # (goes into the Artifact Dependencies Section)
    self._artifact_dependencies = []

    # Whether the package ships all of its artifacts
    self._sealed = True

    # Optionally build everything right away
    if generate_pd:
        self.init_package_skeleton()
        self.build_package()
def __init__(self, workspace, project, dst_path=None, generate_pd=True,
             version="1.0"):
    """
    Initialize the packager for an SDK project.
    :param workspace: SONATA workspace object
    :param project: SDK project to package
    :param dst_path: destination directory for the generated package
    :param generate_pd: build the package descriptor immediately
    :param version: package descriptor version string
    """
    coloredlogs.install(level=workspace.log_level)

    # Parameter bookkeeping
    self._version = version
    self._package_descriptor = None
    self._workspace = workspace
    self._project = project

    # Schema validator for descriptor schemas
    self._schema_validator = SchemaValidator(workspace)

    # One catalogue client per server configured in the workspace
    self._catalogueClients = [CatalogueClient(cat['url'])
                              for cat in workspace.catalogue_servers]

    # Registry of VNFs referenced by the NS (id -> packaged flag)
    self._ns_vnf_registry = {}

    # Where the final package is written
    self._dst_path = dst_path

    # THE entry service template of this package
    self._entry_service_template = None

    # Repositories/catalogue servers this package depends on
    # (goes into the Package Resolver Section)
    self._package_resolvers = []

    # External artifacts referenced but not shipped
    # (goes into the Artifact Dependencies Section)
    self._artifact_dependencies = []

    # Whether the package ships all of its artifacts
    self._sealed = True

    # Optionally build the package descriptor right away
    if generate_pd:
        self.init_package_skeleton(dst_path)
        self.package_descriptor = self._project
def __init__(self, workspace=None):
    """
    Initialize the Validator.
    A workspace may be provided for an easy parameter configuration,
    such as location and extension of descriptors, verbosity level, etc.
    :param workspace: SONATA workspace object
    """
    self._workspace = workspace
    self._syntax = True
    self._integrity = True
    self._topology = True

    # create "virtual" workspace if not provided (don't actually create
    # file structure)
    if not self._workspace:
        self._workspace = Workspace('.', log_level='info')

    # load configurations from workspace
    self._dext = self._workspace.default_descriptor_extension
    self._dpath = '.'
    self._log_level = self._workspace.log_level

    # configure logs
    coloredlogs.install(level=self._log_level)

    # descriptors storage
    self._storage = DescriptorStorage()

    # syntax validation
    self._schema_validator = SchemaValidator(self._workspace)

    # wrapper to count number of errors and warnings.
    # Guard against re-wrapping: `log` is module-level, so creating a
    # second Validator would otherwise stack CountCalls decorators and
    # make every logged error/warning count multiple times.
    # NOTE(review): assumes CountCalls is a class — confirm.
    if not isinstance(log.error, CountCalls):
        log.error = CountCalls(log.error)
    if not isinstance(log.warning, CountCalls):
        log.warning = CountCalls(log.warning)
def __init__(self, workspace, project=None, component=None, catalogue=None):
    """
    Initialize with a workspace and optional project/component/catalogue.
    :param workspace: SONATA workspace object
    :param project: SDK project (optional)
    :param component: component to operate on (optional)
    :param catalogue: target catalogue (optional)
    """
    coloredlogs.install(level=workspace.log_level)

    # Parameter bookkeeping
    self._workspace = workspace
    self._project = project
    self._component = component
    self._catalogue = catalogue

    # Catalogue clients, filled in by create_catalogue_clients()
    self._catalogue_clients = []
    self.create_catalogue_clients()

    # Schema validator for descriptor schemas
    self._schema_validator = SchemaValidator(workspace)
class Packager(object):
    """
    Builds a SONATA package (.son) from an SDK project or from explicit
    lists of service/function descriptors ("custom" package).
    """

    def __init__(self, workspace, project=None, services=None,
                 functions=None, dst_path=None, generate_pd=True,
                 version="1.0"):
        """
        Initialize the Packager.

        :param workspace: SONATA workspace object
        :param project: SDK project to package (alternative to explicit
                        service/function lists)
        :param services: list of service descriptor files (custom package)
        :param functions: list of function descriptor files (custom package)
        :param dst_path: destination directory of the generated package
        :param generate_pd: if True, immediately create the package skeleton
                            and build the package descriptor
        :param version: package descriptor version string
        """
        # Assign parameters
        coloredlogs.install(level=workspace.log_level)
        self._version = version
        self._package_descriptor = None
        self._workspace = workspace
        self._project = project
        self._services = services
        self._functions = functions

        # Create a son-access client
        self._access = AccessClient(self._workspace,
                                    log_level=self._workspace.log_level)

        # Create a validator (syntax-only for packaging purposes)
        self._validator = Validator(workspace=workspace)
        self._validator.configure(syntax=True, integrity=False,
                                  topology=False)

        # Create a schema validator
        self._schema_validator = SchemaValidator(workspace)

        # Keep track of VNF packaging referenced in NS (id -> packaged flag)
        self._ns_vnf_registry = {}

        # location to write the package
        self._dst_path = dst_path if dst_path else '.'

        # temporary working directory (unique per run)
        self._workdir = '.package-' + str(time.time())

        # Specifies THE service template of this package
        self._entry_service_template = None

        # Keep a list of repositories and catalogue servers that this
        # package depends on.
        # This will be included in the Package Resolver Section
        self._package_resolvers = []

        # Keep a list of external artifact dependencies that this package
        # depends upon.
        # This will be included in the Artifact Dependencies Section
        self._artifact_dependencies = []

        # States if this package is self-contained,
        # i.e. if contains all its relevant artifacts
        self._sealed = True

        # Clear and create package specific folder
        if generate_pd:
            self.init_package_skeleton()
            self.build_package()

    def init_package_skeleton(self):
        """
        Validate and initialize the destination folder
        for the creation of the package artifacts.
        """
        if os.path.isdir(self._workdir):
            log.error("Internal error. Temporary workdir already exists.")
            return

        # workdir — removed automatically at interpreter exit
        os.mkdir(self._workdir)
        atexit.register(shutil.rmtree, os.path.abspath(self._workdir))

        # destination path
        if not os.path.isdir(self._dst_path):
            os.mkdir(self._dst_path)

    @property
    def package_descriptor(self):
        """The assembled package descriptor dict (None until built/on error)."""
        return self._package_descriptor

    def build_package(self):
        """
        Create and set the full package descriptor as a dictionary.
        It process the file by each individual section.
        """
        log.info('Create Package Content Section')
        package_content = self.package_pcs()

        log.info('Create Package Resolver Section')
        package_resolver = self.package_prs()

        log.info('Create Package Dependencies Section')
        package_dependencies = self.package_pds()

        log.info('Create Artifact Dependencies Section')
        artifact_dependencies = self.package_ads()

        # The general section must be created last,
        # some fields depend on prior processing
        log.info('Create General Description section')
        if self._project:
            general_description = self.package_gds(
                prj_descriptor=self._project.project_config)
        else:
            general_description = self.package_gds()

        if not general_description:
            log.error("Failed to package General Description Section.")
            return

        # Compile all sections in package descriptor
        self._package_descriptor = general_description

        if not package_content:
            log.error("Failed to package Package Content Section. "
                      "Could not find a network service and/or its "
                      "referenced function descriptors")
            self._package_descriptor = None
            return

        self._package_descriptor.update(package_content)
        self._package_descriptor.update(package_resolver)
        self._package_descriptor.update(package_dependencies)
        self._package_descriptor.update(artifact_dependencies)

        # Create the manifest folder and file
        meta_inf = os.path.join(self._workdir, "META-INF")
        os.makedirs(meta_inf, exist_ok=True)
        with open(os.path.join(meta_inf, "MANIFEST.MF"), "w") as manifest:
            manifest.write(
                yaml.dump(self.package_descriptor, default_flow_style=False))

    @performance
    def package_gds(self, prj_descriptor=None):
        """
        Compile information for the General Description Section.
        This section is exclusively filled by the project descriptor
        file located on the root of every project.

        :param prj_descriptor: project configuration dict (None for a
                               custom package)
        :return: GDS dict, or None on error
        """
        # List of mandatory fields to be included in the GDS
        gds_fields = ['vendor', 'name', 'version', 'maintainer',
                      'description']

        gds = dict()
        gds['descriptor_version'] = self._version
        gds['schema'] = self._schema_validator.get_remote_schema(
            SchemaValidator.SCHEMA_PACKAGE_DESCRIPTOR)
        gds['sealed'] = self._sealed

        if prj_descriptor:
            gds['entry_service_template'] = self._entry_service_template

            if 'package' not in prj_descriptor.keys():
                log.error("Please define 'package' section in {}".format(
                    Project.__descriptor_name__))
                return

            errors = []
            for field in gds_fields:
                if field not in prj_descriptor['package'].keys():
                    errors.append(field)
                else:
                    gds[field] = prj_descriptor['package'][field]

            if errors:
                log.error(
                    'Please define {} in the package section of {}'.format(
                        ', '.join(errors), Project.__descriptor_name__))
                return
        else:
            # TODO: what properties to set in a custom package? TBD...
            gds['vendor'] = 'custom'
            gds['name'] = 'package'
            gds['version'] = '1.0'
            gds['maintainer'] = 'developer'
            gds['description'] = 'custom generated package'

        return gds

    @performance
    def package_pcs(self):
        """
        Compile information for the Package Content Section.
        This section contains all the artifacts that are contained
        and shipped by the package.

        :return: dict with key 'package_content', or None on error
        """
        pcs = []

        # Load and add service descriptor
        if self._project:
            nsd = self.generate_project_nsd()
            if not nsd or len(nsd) == 0:
                log.error("Failed to package service descriptor")
                return
            pcs += nsd
        elif self._services:
            nsds = self.generate_custom_nsds()
            if not nsds:
                log.error("Failed to package service descriptors")
                return
            pcs += nsds

        # Load and add the function descriptors
        if self._project:
            vnfds = self.generate_project_vnfds()
            if not vnfds or len(vnfds) == 0:
                log.error("Failed to package function descriptors")
                return
            pcs += vnfds
        elif self._functions:
            vnfds = self.generate_custom_vnfds()
            if not vnfds:
                log.error("Failed to package function descriptors")
                return
            pcs += vnfds

        return dict(package_content=pcs)

    @performance
    def package_prs(self):
        """
        Compile information for the Package Resolver Section.
        This section contains information about catalogues
        and repositories needed to resolve the dependencies
        specified in this package descriptor.

        :return: dict with key 'package_resolvers' (empty dict if none)
        """
        if len(self._package_resolvers) == 0:
            log.debug("There are no required Package Resolvers. "
                      "This section will not be included.")
            return dict()

        return dict(package_resolvers=self._package_resolvers)

    @performance
    def package_pds(self):
        """
        Compile information for the Package Dependencies Section.
        This section specifies additional packages that this
        package depends up on.

        :return: empty dict (package dependencies not yet supported)
        """
        log.debug("There are no required Package Dependencies. "
                  "This section will not be included.")
        return dict()

    @performance
    def package_ads(self):
        """
        Compile information for the Artifact Dependencies Section.
        This section contains components that are not included in the
        package but are referenced in its descriptors. For instance,
        it includes the url of vm_images used by network functions.

        :return: dict with key 'artifact_dependencies' (empty dict if none)
        """
        if len(self._artifact_dependencies) == 0:
            log.debug("There are no required Artifact Dependencies. "
                      "This section will not be included.")
            return dict()

        return dict(artifact_dependencies=self._artifact_dependencies)

    def generate_project_nsd(self):
        """
        Compile information for the service descriptor section.

        :return: list of package content entries, or None on error
        """
        base_path = os.path.join(self._project.project_root, 'sources',
                                 'nsd')
        if not os.path.isdir(base_path):
            log.error("Missing NS directory '{}'".format(base_path))
            return

        # Ensure that only one NS descriptor exists
        nsd_list = [
            file for file in os.listdir(base_path)
            if os.path.isfile(os.path.join(base_path, file)) and
            file.endswith(self._project.descriptor_extension)
        ]

        check = len(nsd_list)
        if check == 0:
            log.error("Missing NS Descriptor file.")
            return
        elif check > 1:
            log.error("Only one NS Descriptor file is allowed.")
            return
        else:
            nsd_filename = nsd_list[0]
            # NOTE(review): yaml.load on project files; consider
            # yaml.safe_load if descriptors may be untrusted.
            with open(os.path.join(base_path, nsd_filename), 'r') as _file:
                nsd = yaml.load(_file)

        # Validate NSD
        log.debug(
            "Validating Service Descriptor NSD='{}'".format(nsd_filename))
        if not self._validator.validate_service(
                os.path.join(base_path, nsd_filename)):
            log.error("Failed to validate Service Descriptor '{}'. "
                      "Aborting package creation".format(nsd_filename))
            return

        # Cycle through VNFs and register their IDs for later
        # dependency check
        if 'network_functions' in nsd:
            vnf_list = \
                [vnf for vnf in nsd['network_functions'] if vnf['vnf_name']]

            for vnf in vnf_list:
                self.register_ns_vnf(
                    get_vnf_id_full(vnf['vnf_vendor'], vnf['vnf_name'],
                                    vnf['vnf_version']))

        # Create SD location
        nsd = os.path.join(base_path, nsd_filename)
        sd_path = os.path.join(self._workdir, "service_descriptors")
        os.makedirs(sd_path, exist_ok=True)

        # Copy service descriptor file
        sd = os.path.join(sd_path, nsd_filename)
        self.copy_descriptor_file(nsd, sd)

        # Generate NSD package content entry
        pce = []
        pce_sd = dict()
        pce_sd["content-type"] = "application/sonata.service_descriptor"
        pce_sd["name"] = "/service_descriptors/{}".format(nsd_filename)
        pce_sd["md5"] = generate_hash(sd)
        pce.append(pce_sd)

        # Specify the NSD as THE entry service template of
        # package descriptor
        self._entry_service_template = pce_sd['name']

        return pce

    def generate_custom_nsds(self):
        """
        Compile information for the service descriptors, when
        creating a custom package.

        :return: list of package content entries, or None on error
        """
        log.info("Packaging service descriptors...")
        for nsd_filename in self._services:
            if not self._validator.validate_service(nsd_filename):
                log.error(
                    "Failed to package service '{}'".format(nsd_filename))
                return

        # Create SD location
        sd_path = os.path.join(self._workdir, "service_descriptors")
        os.makedirs(sd_path, exist_ok=True)

        # Copy service descriptors and generate their entry points
        pce = []
        for nsd_filename in self._services:
            nsd_basename = os.path.basename(nsd_filename)
            sd = os.path.join(sd_path, nsd_basename)
            self.copy_descriptor_file(nsd_filename, sd)
            pce_sd = dict()
            pce_sd["content-type"] = "application/sonata.service_descriptor"
            pce_sd["name"] = "/service_descriptors/{}".format(nsd_basename)
            pce_sd["md5"] = generate_hash(sd)
            pce.append(pce_sd)

        return pce

    def generate_project_vnfds(self):
        """
        Compile information for the function descriptors, when
        packaging an SDK project.

        :return: list of package content entries, or None on error
        """
        # Add VNFs from project source
        log.info("Packaging VNF descriptors from project source...")
        pcs = self.generate_project_source_vnfds(
            os.path.join(self._project.project_root, 'sources', 'vnf'))

        # Verify that all VNFs from NSD were packaged
        unpack_vnfs = self.get_unpackaged_ns_vnfs()
        if len(unpack_vnfs) > 0:
            # Load function descriptors (VNFDs) from external sources
            log.info("Solving dependencies for VNF descriptors...")
            if not self.load_external_vnfds(unpack_vnfs):
                log.error("Unable to solve all dependencies "
                          "required by the service descriptor.")
                return

            log.info("Packaging VNF descriptors from external source...")
            pcs_ext = self.generate_external_vnfds(
                os.path.join(self._workspace.workspace_root,
                             self._workspace.vnf_catalogue_dir),
                unpack_vnfs)

            if not pcs_ext or len(pcs_ext) == 0:
                return
            pcs += pcs_ext

            # Verify again if all VNFs were correctly packaged
            unpack_vnfs = self.get_unpackaged_ns_vnfs()
            if len(unpack_vnfs) > 0:
                log.error("Unable to validate all VNFs "
                          "required by the service descriptor.")
                return

        return pcs

    def generate_custom_vnfds(self):
        """
        Compile information for the function descriptors,
        when creating a custom package.

        :return: list of package content entries, or None on error
        """
        log.info("Packaging VNF descriptors...")
        for vnfd_filename in self._functions:
            if not self._validator.validate_function(vnfd_filename):
                log.error(
                    "Failed to package function '{}'".format(vnfd_filename))
                return

        # Create FD location
        sd_path = os.path.join(self._workdir, "function_descriptors")
        os.makedirs(sd_path, exist_ok=True)

        # Copy function descriptors and generate their entry points
        pce = []
        for vnfd_filename in self._functions:
            vnfd_basename = os.path.basename(vnfd_filename)
            sd = os.path.join(sd_path, vnfd_basename)
            self.copy_descriptor_file(vnfd_filename, sd)
            pce_sd = dict()
            pce_sd["content-type"] = \
                "application/sonata.function_descriptor"
            # FIX: entry name must match the directory the descriptor is
            # copied to ('function_descriptors', not 'service_descriptors')
            pce_sd["name"] = "/function_descriptors/{}".format(vnfd_basename)
            pce_sd["md5"] = generate_hash(sd)
            pce.append(pce_sd)

        return pce

    def load_external_vnfds(self, vnf_id_list):
        """
        This method is responsible to load all VNFs, required
        by the NS, that are not part of project source.
        VNFs can be loaded from the Workspace catalog or/and
        from the catalogue servers.

        :param vnf_id_list: List of VNF ID to solve
        :return: True for success, False for failure
        """
        log.debug(
            "Loading the following VNF descriptors: {}".format(vnf_id_list))

        # Iterate through the VNFs required by the NS
        for vnf_id in vnf_id_list:

            log.debug("Probing workspace catalogue for VNF id='{}'...".format(
                vnf_id))

            # >> First, check if this VNF is in the workspace catalogue
            catalogue_path = os.path.join(self._workspace.workspace_root,
                                          self._workspace.vnf_catalogue_dir,
                                          vnf_id)
            if os.path.isdir(catalogue_path):
                # Exists! Save catalogue path of this vnf for
                # later packaging
                log.debug(
                    "Found VNF id='{}' in workspace catalogue '{}'".format(
                        vnf_id, catalogue_path))
                continue

            log.debug("VNF id='{}' is not present in workspace catalogue. "
                      "Contacting SP Catalogue...".format(vnf_id))

            # If not in WS catalogue, get the VNF from the SP Catalogues.
            # FIX: the return value was previously discarded (vnfd stayed
            # None), so external resolution could never succeed.
            vnfd = self.retrieve_external_vnf(vnf_id)
            if not vnfd:
                log.warning(
                    "VNF id='{}' is not present in SP Catalogue".format(
                        vnf_id))
                return False

            # Create dir to hold the retrieved VNF in workspace catalogue
            log.debug("VNF id='{}' retrieved from the SP Catalogue. "
                      "Loading to workspace cache.".format(vnf_id))
            os.mkdir(catalogue_path)
            # FIX: use a context manager so the file is always closed
            with open(os.path.join(
                    catalogue_path,
                    vnfd['name'] + "." +
                    self._project.descriptor_extension), 'w') as vnfd_f:
                yaml.dump(vnfd, vnfd_f, default_flow_style=False)

        return True

    def generate_project_source_vnfds(self, base_path):
        """
        Compile information for the list of VNFs
        This function iterates over the different VNF entries

        :param base_path: base dir location of VNF descriptors
        :return: list of package content entries
        """
        vnf_folders = filter(
            lambda file: os.path.isdir(os.path.join(base_path, file)),
            os.listdir(base_path))

        pcs = []
        for vnf in vnf_folders:
            pc_entries = self.generate_vnfd_entry(
                os.path.join(base_path, vnf), vnf)
            if not pc_entries or len(pc_entries) == 0:
                continue
            for pce in iter(pc_entries):
                pcs.append(pce)

        return pcs

    def generate_external_vnfds(self, base_path, vnf_ids):
        """
        Compile package content entries for VNFs resolved from the
        workspace catalogue cache.

        :param base_path: workspace VNF catalogue directory
        :param vnf_ids: set of VNF IDs to include
        :return: list of package content entries
        """
        vnf_folders = filter(
            lambda file: os.path.isdir(os.path.join(base_path, file)) and
            file in vnf_ids, os.listdir(base_path))

        pcs = []
        for vnf in vnf_folders:
            pc_entries = self.generate_vnfd_entry(
                os.path.join(base_path, vnf), vnf)
            if not pc_entries or len(pc_entries) == 0:
                continue
            for pce in iter(pc_entries):
                pcs.append(pce)

        return pcs

    def generate_vnfd_entry(self, base_path, vnf):
        """
        Compile information for a specific VNF.
        The VNF descriptor is validated and added to the package.
        VDU image files, referenced in the VNF descriptor, are
        added to the package.

        :param base_path: The path where the VNF file is located
        :param vnf: The VNF reference path
        :return: The package content entries.
        """
        # Locate VNFD
        vnfd_list = [
            file for file in os.listdir(base_path)
            if os.path.isfile(os.path.join(base_path, file)) and
            file.endswith(self._project.descriptor_extension)
        ]

        # Validate number of Yaml files
        check = len(vnfd_list)
        if check == 0:
            log.warning("Missing VNF descriptor file in path '{}'. "
                        "A descriptor with '{}' extension should be "
                        "in this path".format(
                            base_path,
                            self._project.descriptor_extension))
            return
        elif check > 1:
            log.warning("Multiple YAML descriptors found in '{}'. "
                        "Ignoring path.".format(os.path.basename(base_path)))
            return
        else:
            with open(os.path.join(base_path, vnfd_list[0]), 'r') as _file:
                vnfd = yaml.load(_file)

        vnfd_path = os.path.join(os.path.basename(base_path), vnfd_list[0])

        # Validate VNFD
        log.debug("Validating VNF descriptor file='{}'".format(vnfd_path))
        if not self._validator.validate_function(
                os.path.join(base_path, vnfd_list[0])):
            log.exception(
                "Failed to validate VNF descriptor '{}'".format(vnfd_path))
            return

        # Check if this VNF exists in the ns_vnf registry.
        # If does not, cancel its packaging
        if not self.check_in_ns_vnf(get_vnf_id(vnfd)):
            log.warning("VNF id='{}' file='{}' is not referenced in the "
                        "service descriptor. It will be excluded from "
                        "the package".format(get_vnf_id(vnfd), vnfd_path))
            return

        pce = []
        # Create fd location
        fd_path = os.path.join(self._workdir, "function_descriptors")
        os.makedirs(fd_path, exist_ok=True)

        # Copy the descriptor file
        fd = os.path.join(fd_path, vnfd_list[0])
        self.copy_descriptor_file(os.path.join(base_path, vnfd_list[0]), fd)

        # Generate VNFD Entry
        pce_fd = dict()
        pce_fd["content-type"] = "application/sonata.function_descriptor"
        pce_fd["name"] = "/function_descriptors/{}".format(vnfd_list[0])
        pce_fd["md5"] = generate_hash(fd)
        pce.append(pce_fd)

        if 'virtual_deployment_units' in vnfd:
            vdu_list = [
                vdu for vdu in vnfd['virtual_deployment_units']
                if vdu['vm_image']
            ]

            for vdu in vdu_list:

                # vm_image can be a local File, a local Dir,
                # a URL or a reference to docker image
                vdu_image_path = vdu['vm_image']

                if validators.url(vdu_image_path):  # Check if is URL/URI.
                    try:
                        # Check if the image URL exists with a
                        # short Timeout
                        requests.head(vdu_image_path, timeout=1)

                    except (requests.Timeout, requests.ConnectionError):
                        log.warning("Failed to verify the "
                                    "existence of vm_image '{}'".format(
                                        vdu['vm_image']))

                    # Add image URL to artifact dependencies
                    self._add_artifact_dependency(
                        name=vnfd['name'] + '-' +
                        vdu['id'] + '-vm_image',
                        vendor=vnfd['vendor'],
                        version=vnfd['version'],
                        url=vdu['vm_image'],
                        md5='02236f2ae558018ed14b5222ef1bd9f1')
                    # TODO: remote url must provide md5? This is dummy!

                    continue

                else:
                    # Check for URL local (e.g. file:///...)
                    ptokens = pathlib.Path(vdu_image_path).parts
                    if ptokens[0] == 'file:':  # URL to local file
                        bd = os.path.join(base_path, ptokens[1])
                    else:  # regular filename/path
                        bd = os.path.join(base_path, vdu['vm_image'])

                if os.path.exists(bd):  # local File or local Dir

                    if os.path.isfile(bd):
                        pce.append(
                            self.__pce_img_gen__(base_path, vnf, vdu,
                                                 vdu['vm_image'],
                                                 dir_p='', dir_o=''))
                    elif os.path.isdir(bd):
                        for root, dirs, files in os.walk(bd):
                            dir_o = root[len(bd):]
                            dir_p = dir_o.replace(os.path.sep, "/")
                            for f in files:
                                if dir_o.startswith(os.path.sep):
                                    dir_o = dir_o[1:]
                                pce.append(
                                    self.__pce_img_gen__(
                                        root, vnf, vdu, f,
                                        dir_p=dir_p, dir_o=dir_o))

                elif vdu['vm_image_format'] == 'docker':
                    log.debug("Referenced vm_image is docker '{}'".format(
                        vdu['vm_image']))

        return pce

    @staticmethod
    def copy_descriptor_file(src_descriptor, dst_descriptor):
        """
        Copy a descriptor file. Instead of just copying the file,
        it parses and reads the content of the source file, then it
        creates a new file and writes in it the digested content.

        :param src_descriptor: source descriptor file path
        :param dst_descriptor: destination descriptor file path
        :return:
        """
        with open(src_descriptor, "r") as vnfd_file:
            vnf_content = yaml.load(vnfd_file)

        with open(dst_descriptor, "w") as vnfd_file:
            vnfd_file.write(yaml.dump(vnf_content, default_flow_style=False))

    def __pce_img_gen__(self, bd, vnf, vdu, f, dir_p='', dir_o=''):
        """Build a package content entry for a single VDU image file."""
        pce = dict()
        img_format = 'raw' \
            if not vdu['vm_image_format'] \
            else vdu['vm_image_format']
        pce["content-type"] = "application/sonata.{}_files".format(
            img_format)
        pce["name"] = "/{}_files/{}{}/{}".format(img_format, vnf, dir_p, f)
        pce["md5"] = self.__pce_img_gen_fc__(img_format, vnf, f, bd, dir_o)

        return pce

    def __pce_img_gen_fc__(self, img_format, vnf, f, root, dir_o=''):
        """Copy an image file into the workdir and return its md5 hash."""
        fd_path = os.path.join("{}_files".format(img_format), vnf, dir_o)
        fd_path = os.path.join(self._workdir, fd_path)
        os.makedirs(fd_path, exist_ok=True)
        fd = os.path.join(fd_path, f)
        shutil.copyfile(os.path.join(root, f), fd)
        return generate_hash(fd)

    def generate_package(self, name):
        """
        Generate the final package version.

        :param name: The name of the final version of the package,
                     the project name will be used if no name provided
        """
        # Validate all needed information
        if not self._package_descriptor:
            log.critical("Missing package descriptor. "
                         "Failed to generate package.")
            exit(1)

        if not name:
            name = self._package_descriptor['vendor'] + "." + \
                self._package_descriptor['name'] + "." + \
                self._package_descriptor['version']

        # Generate package file
        zip_name = os.path.join(self._dst_path, name + '.son')
        with closing(zipfile.ZipFile(zip_name, 'w')) as pck:
            for base, dirs, files in os.walk(self._workdir):
                for file_name in files:
                    full_path = os.path.join(base, file_name)
                    relative_path = \
                        full_path[len(self._workdir) + len(os.sep):]
                    if not full_path == zip_name:
                        pck.write(full_path, relative_path)

        # Validate PD
        log.debug("Validating Package")
        if not self._validator.validate_package(zip_name):
            log.debug("Failed to validate Package Descriptor. "
                      "Aborting package creation.")
            self._package_descriptor = None
            return

        package_md5 = generate_hash(zip_name)
        log.info("Package generated successfully.\nFile: {}\nMD5: {}\n"
                 .format(os.path.abspath(zip_name), package_md5))

    def register_ns_vnf(self, vnf_id):
        """
        Add a vnf to the NS VNF registry.

        :param vnf_id:
        :return: True for successful registry.
                 False if the VNF already exists in the registry.
        """
        if vnf_id in self._ns_vnf_registry:
            return False

        self._ns_vnf_registry[vnf_id] = False
        return True

    def check_in_ns_vnf(self, vnf_id):
        """Marks a VNF as packaged in the SD VNF registry."""
        if vnf_id not in self._ns_vnf_registry:
            return False

        self._ns_vnf_registry[vnf_id] = True
        return True

    def get_unpackaged_ns_vnfs(self):
        """
        Obtain the a list of VNFs that were referenced
        by NS but weren't packaged.
        """
        u_vnfs = []
        for vnf in self._ns_vnf_registry:
            if not self._ns_vnf_registry[vnf]:
                u_vnfs.append(vnf)

        return u_vnfs

    def retrieve_external_vnf(self, descriptor_id):
        """
        Retrieve descriptor from the service Platform catalogue.
        It will loop through available Service Platforms to
        retrieve the required descriptor

        :return: descriptor content
        """
        # first, contact the default platform
        vnfd = self._access.pull_resource('functions',
                                          identifier=descriptor_id,
                                          uuid=False)
        if vnfd:
            return vnfd

        # if not retrieved, loop through remaining platforms
        for platform, p_id in self._workspace.service_platforms.items():
            # ignore default platform
            if p_id == self._workspace.default_service_platform:
                continue

            vnfd = self._access.pull_resource('functions',
                                              identifier=descriptor_id,
                                              uuid=False,
                                              platform_id=p_id)
            if vnfd:
                return vnfd

    def _add_package_resolver(self, name, username='******',
                              password='******'):
        """
        Record a package-resolver entry (catalogue/repository this
        package depends on), deduplicated by name.
        """
        log.debug("Adding package resolver entry '{}'".format(name))

        # Check if already included
        for pr_entry in self._package_resolvers:
            if pr_entry['name'] == name:
                log.debug("Package resolver entry '{}' "
                          "was previously added. Ignoring.".format(name))
                return

        pr_entry = {
            'name': name,
            'credentials': {
                'username': username,
                'password': password
            }
        }
        self._package_resolvers.append(pr_entry)

    def _add_artifact_dependency(self, name, vendor, version, url, md5,
                                 username='******', password='******'):
        """
        Record an external artifact dependency (e.g. a remote vm_image),
        deduplicated by name. Marks the package as not sealed.
        """
        log.debug("Adding artifact dependency entry '{}'".format(name))

        # Check if already included
        for ad_entry in self._artifact_dependencies:
            if ad_entry['name'] == name:
                log.debug("Artifact dependency entry '{}' "
                          "was previously added. Ignoring.".format(name))
                return

        ad_entry = {
            'name': name,
            'vendor': vendor,
            'version': version,
            'url': url,
            'md5': md5,
            'credentials': {
                'username': username,
                'password': password
            }
        }
        self._artifact_dependencies.append(ad_entry)

        # Set package sealed to false as it will not be self-contained
        self._sealed = False
class Validator(object):

    def __init__(self, workspace=None):
        """
        Initialize the Validator.
        A workspace may be provided for an easy parameter configuration,
        such as location and extension of descriptors, verbosity level, etc.
        :param workspace: SONATA workspace object
        """
        self._workspace = workspace
        self._syntax = True
        self._integrity = True
        self._topology = True

        # create "virtual" workspace if not provided (don't actually create
        # file structure)
        if not self._workspace:
            self._workspace = Workspace('.', log_level='info')

        # load configurations from workspace
        self._dext = self._workspace.default_descriptor_extension
        self._dpath = '.'
        self._log_level = self._workspace.log_level

        # configure logs
        coloredlogs.install(level=self._log_level)

        # descriptors storage
        self._storage = DescriptorStorage()

        # syntax validation
        self._schema_validator = SchemaValidator(self._workspace)

        # reset event logger
        evtlog.reset()

    @property
    def errors(self):
        """Errors accumulated by the event logger during validation."""
        return evtlog.errors

    @property
    def error_count(self):
        """
        Provides the number of errors given during validation.
        """
        return len(self.errors)

    @property
    def warnings(self):
        """Warnings accumulated by the event logger during validation."""
        return evtlog.warnings

    @property
    def warning_count(self):
        """
        Provides the number of warnings given during validation.
        """
        return len(self.warnings)

    def configure(self, syntax=None, integrity=None, topology=None,
                  dpath=None, dext=None, debug=False):
        """
        Configure parameters for validation. It is recommended to call this
        function before performing a validation.
        :param syntax: specifies whether to validate syntax
        :param integrity: specifies whether to validate integrity
        :param topology: specifies whether to validate network topology
        :param dpath: directory to search for function descriptors (VNFDs)
        :param dext: extension of descriptor files (default: 'yml')
        :param debug: increase verbosity level of logger
        """
        # assign parameters; None means "keep the current setting"
        if syntax is not None:
            self._syntax = syntax
        if integrity is not None:
            self._integrity = integrity
        if topology is not None:
            self._topology = topology
        if dext is not None:
            self._dext = dext
        if dpath is not None:
            self._dpath = dpath
        if debug:
            # BUG FIX: removed stray debug leftover print("yay")
            coloredlogs.install(level='debug')

    def _assert_configuration(self):
        """
        Ensures that the current configuration is compatible with the
        validation to perform. If issues are found the application is
        interrupted with the appropriate error.
        This is an internal function which must be invoked only by:
        - 'validate_package'
        - 'validate_project'
        - 'validate_service'
        - 'validate_function'
        """
        # ensure this function is called by specific functions
        caller = inspect.stack()[1][3]
        if caller not in ('validate_function', 'validate_service',
                          'validate_project', 'validate_package'):
            log.error("Cannot assert a correct configuration. Validation "
                      "scope couldn't be determined. Aborting")
            sys.exit(1)

        # general rules - apply to all validations
        if self._integrity and not self._syntax:
            log.error("Cannot validate integrity without validating syntax "
                      "first. Aborting.")
            sys.exit(1)

        if self._topology and not self._integrity:
            log.error("Cannot validate topology without validating integrity "
                      "first. Aborting.")
            sys.exit(1)

        if not self._syntax:
            log.error("Nothing to validate. Aborting.")
            sys.exit(1)

        # scope-specific rules (package/project/function currently have none)
        if caller == 'validate_service':
            # check SERVICE validation parameters
            if (self._integrity or self._topology) and not \
                    (self._dpath and self._dext):
                log.critical("Invalid validation parameters. To validate the "
                             "integrity or topology of a service both "
                             "'--dpath' and '--dext' parameters must be "
                             "specified.")
                sys.exit(1)

    def validate_package(self, package):
        """
        Validate a SONATA package.
        By default, it performs the following validations: syntax, integrity
        and network topology.
        :param package: SONATA package filename
        :return: True if all validations were successful, False otherwise
        """
        self._assert_configuration()

        log.info("Validating package '{0}'".format(os.path.abspath(package)))

        # check if package is packed in the correct format
        if not zipfile.is_zipfile(package):
            evtlog.log("Invalid SONATA package '{}'".format(package),
                       'evt_package_format_invalid')
            return

        package_dir = '.' + str(time.time())
        with closing(zipfile.ZipFile(package, 'r')) as pkg:
            # extract package contents
            pkg.extractall(package_dir)

        # set folder for deletion when program exits
        atexit.register(shutil.rmtree, package_dir)

        # validate package file structure
        if not self._validate_package_struct(package_dir):
            evtlog.log("Invalid SONATA package structure '{}'".format(package),
                       'evt_package_struct_invalid')
            return

        pd_filename = os.path.join(package_dir, 'META-INF', 'MANIFEST.MF')
        package = self._storage.create_package(pd_filename)

        if self._syntax and not self._validate_package_syntax(package):
            return

        if self._integrity and \
                not self._validate_package_integrity(package, package_dir):
            return

        return True

    def validate_project(self, project):
        """
        Validate a SONATA project.
        By default, it performs the following validations: syntax, integrity
        and network topology.
        :param project: SONATA project
        :return: True if all validations were successful, False otherwise
        """
        self._assert_configuration()

        log.info("Validating project '{0}'".format(project.project_root))
        log.info("... syntax: {0}, integrity: {1}, topology: {2}".format(
            self._syntax, self._integrity, self._topology))

        # retrieve project configuration
        self._dpath = project.vnfd_root
        self._dext = project.descriptor_extension

        # load all project descriptors present at source directory
        log.debug("Loading project service")
        nsd_file = Validator._load_project_service_file(project)

        return self.validate_service(nsd_file)

    def validate_service(self, nsd_file):
        """
        Validate a SONATA service.
        By default, it performs the following validations: syntax, integrity
        and network topology.
        :param nsd_file: service descriptor filename
        :return: True if all validations were successful, False otherwise
        """
        self._assert_configuration()

        log.info("Validating service '{0}'".format(nsd_file))
        log.info("... syntax: {0}, integrity: {1}, topology: {2}".format(
            self._syntax, self._integrity, self._topology))

        service = self._storage.create_service(nsd_file)
        if not service:
            evtlog.log(
                "Failed to read the service descriptor of file '{}'".format(
                    nsd_file),
                'evt_service_invalid_descriptor')
            return

        # validate service syntax
        if self._syntax and not self._validate_service_syntax(service):
            return

        if self._integrity and not self._validate_service_integrity(service):
            return

        if self._topology and not self._validate_service_topology(service):
            return

        return True

    def validate_function(self, vnfd_path):
        """
        Validate one or multiple SONATA functions (VNFs).
        By default, it performs the following validations: syntax, integrity
        and network topology.
        :param vnfd_path: function descriptor (VNFD) filename or
                          a directory to search for VNFDs
        :return: True if all validations were successful, False otherwise
        """
        self._assert_configuration()

        # validate multiple VNFs when a directory is given
        if os.path.isdir(vnfd_path):
            log.info("Validating functions in path '{0}'".format(vnfd_path))

            vnfd_files = list_files(vnfd_path, self._dext)
            for vnfd_file in vnfd_files:
                if not self.validate_function(vnfd_file):
                    return
            return True

        log.info("Validating function '{0}'".format(vnfd_path))
        log.info("... syntax: {0}, integrity: {1}, topology: {2}".format(
            self._syntax, self._integrity, self._topology))

        function = self._storage.create_function(vnfd_path)
        if not function:
            evtlog.log("Couldn't store VNF of file '{0}'".format(vnfd_path),
                       'evt_function_invalid_descriptor')
            return

        if self._syntax and not self._validate_function_syntax(function):
            return

        if self._integrity and not self._validate_function_integrity(function):
            return

        if self._topology and not self._validate_function_topology(function):
            return

        return True

    def _validate_package_struct(self, package_dir):
        """
        Validate the file structure of a SONATA package.
        :param package_dir: directory of extracted package
        :return: True if successful, None otherwise
        """
        # validate directory 'META-INF'
        meta_dir = os.path.join(package_dir, 'META-INF')
        if not os.path.isdir(meta_dir):
            log.error("A directory named 'META-INF' must exist, "
                      "located at the root of the package")
            return

        if len(os.listdir(meta_dir)) > 1:
            log.error("The 'META-INF' directory must only contain the file "
                      "'MANIFEST.MF'")
            return

        if not os.path.exists(os.path.join(meta_dir, 'MANIFEST.MF')):
            log.error("A file named 'MANIFEST.MF' must exist in directory "
                      "'META-INF'")
            return

        # validate directory 'service_descriptors' (optional, but must not
        # be empty when present)
        services_dir = os.path.join(package_dir, 'service_descriptors')
        if os.path.isdir(services_dir):
            if len(os.listdir(services_dir)) == 0:
                log.error("The 'service_descriptors' directory must contain at"
                          " least one service descriptor file")
                return

        # validate directory 'function_descriptors' (optional, but must not
        # be empty when present)
        functions_dir = os.path.join(package_dir, 'function_descriptors')
        if os.path.isdir(functions_dir):
            if len(os.listdir(functions_dir)) == 0:
                log.error("The 'function_descriptors' directory must contain "
                          "at least one function descriptor file")
                return

        return True

    def _validate_package_syntax(self, package):
        """
        Validate the syntax of the package descriptor of a SONATA
        package against its schema.
        :param package: package object to validate
        :return: True if syntax is correct, None otherwise
        """
        log.info("Validating syntax of package descriptor '{0}'".format(
            package.id))
        if not self._schema_validator.validate(
                package.content, SchemaValidator.SCHEMA_PACKAGE_DESCRIPTOR):
            evtlog.log(
                "Invalid syntax in MANIFEST of package: '{0}'".format(
                    package.id),
                'evt_pd_stx_invalid')
            return
        return True

    def _validate_service_syntax(self, service):
        """
        Validate the syntax of a service (NS) against its schema.
        :param service: service to validate
        :return: True if syntax is correct, None otherwise
        """
        log.info("Validating syntax of service '{0}'".format(service.id))
        if not self._schema_validator.validate(
                service.content, SchemaValidator.SCHEMA_SERVICE_DESCRIPTOR):
            evtlog.log("Invalid syntax in service: '{0}'".format(service.id),
                       'evt_nsd_stx_invalid')
            return
        return True

    def _validate_function_syntax(self, function):
        """
        Validate the syntax of a function (VNF) against its schema.
        :param function: function to validate
        :return: True if syntax is correct, None otherwise
        """
        log.info("Validating syntax of function '{0}'".format(function.id))
        if not self._schema_validator.validate(
                function.content, SchemaValidator.SCHEMA_FUNCTION_DESCRIPTOR):
            evtlog.log("Invalid syntax in function '{0}'".format(function.id),
                       'evt_vnfd_stx_invalid')
            return
        return True

    def _validate_package_integrity(self, package, root_dir):
        """
        Validate the integrity of a package.
        It will validate the entry service of the package as well as its
        referenced functions.
        :param package: package object
        :param root_dir: directory of the extracted package contents
        :return: True if integrity is correct, None otherwise
        """
        log.info("Validating integrity of package '{0}'".format(package.id))

        # load referenced service descriptor files
        for f in package.descriptors:
            filename = os.path.join(root_dir, strip_root(f))
            log.debug("Verifying file '{0}'".format(f))
            if not os.path.isfile(filename):
                evtlog.log(
                    "Referenced descriptor file '{0}' is not "
                    "packaged.".format(f),
                    'evt_pd_itg_invalid_reference')
                return

            gen_md5 = generate_hash(filename)
            manif_md5 = package.md5(strip_root(f))
            # only compare when the manifest actually declares an MD5
            if manif_md5 and gen_md5 != manif_md5:
                evtlog.log(
                    "MD5 hash of file '{0}' is not equal to the "
                    "defined in package descriptor:\nGen MD5:\t{1}\n"
                    "MANIF MD5:\t{2}".format(f, gen_md5, manif_md5),
                    'evt_pd_itg_invalid_md5')

        # configure dpath for function referencing
        self.configure(dpath=os.path.join(root_dir, 'function_descriptors'))

        # finally, validate the package entry service file
        entry_service_file = os.path.join(
            root_dir, strip_root(package.entry_service_file))

        return self.validate_service(entry_service_file)

    def _validate_service_integrity(self, service):
        """
        Validate the integrity of a service (NS).
        It checks for inconsistencies in the identifiers of connection
        points, virtual links, etc.
        :param service: service to validate
        :return: True if integrity is correct, None otherwise
        """
        log.info("Validating integrity of service '{0}'".format(service.id))

        # get referenced function descriptors (VNFDs)
        if not self._load_service_functions(service):
            evtlog.log("Failed to read service function descriptors",
                       'evt_nsd_itg_function_unavailable')
            return

        # validate service function descriptors (VNFDs)
        for fid, function in service.functions.items():
            if not self.validate_function(function.filename):
                evtlog.log(
                    "Failed to validate function descriptor '{0}'".format(
                        function.filename),
                    'evt_nsd_itg_function_invalid')
                return

        # load service interfaces
        if not service.load_interfaces():
            evtlog.log(
                "Couldn't load the connection points of service id='{0}'".
                format(service.id),
                'evt_nsd_itg_badsection_cpoints')
            return

        # load service links
        if not service.load_virtual_links():
            evtlog.log(
                "Couldn't load virtual links of service id='{0}'".format(
                    service.id),
                'evt_nsd_itg_badsection_vlinks')
            return

        # check for undeclared connection points referenced in virtual links
        undeclared = service.find_undeclared_interfaces()
        if undeclared:
            evtlog.log(
                "Virtual links section has undeclared connection "
                "points: {0}".format(undeclared),
                'evt_nsd_itg_undeclared_cpoint')
            return

        # check for unused interfaces (warning only, does not abort)
        unused_ifaces = service.find_unused_interfaces()
        if unused_ifaces:
            evtlog.log(
                "Service has unused connection points: {0}".format(
                    unused_ifaces),
                'evt_nsd_itg_unused_cpoint')

        # verify integrity between vnf_ids and links
        for lid, link in service.links.items():
            for iface in link.interfaces:
                if iface not in service.interfaces:
                    iface_tokens = iface.split(':')
                    if len(iface_tokens) != 2:
                        evtlog.log(
                            "Connection point '{0}' in virtual link "
                            "'{1}' is not defined".format(iface, lid),
                            'evt_nsd_itg_undefined_cpoint')
                        return
                    vnf_id = iface_tokens[0]
                    function = service.mapped_function(vnf_id)
                    if not function:
                        # BUG FIX: placeholders were '{0}'/'{0}'/'{1}' for
                        # three format args, so iface was never shown and
                        # lid appeared in the wrong position
                        evtlog.log(
                            "Function (VNF) of vnf_id='{0}' declared "
                            "in connection point '{1}' in virtual link "
                            "'{2}' is not defined".format(vnf_id, iface, lid),
                            'evt_nsd_itg_undefined_cpoint')
                        return

        return True

    def _validate_function_integrity(self, function):
        """
        Validate the integrity of a function (VNF).
        It checks for inconsistencies in the identifiers of connection
        points, virtual deployment units (VDUs), ...
        :param function: function to validate
        :return: True if integrity is correct, None otherwise
        """
        log.info("Validating integrity of function descriptor '{0}'".format(
            function.id))

        # load function interfaces
        if not function.load_interfaces():
            evtlog.log(
                "Couldn't load the interfaces of function id='{0}'".format(
                    function.id),
                'evt_vnfd_itg_badsection_cpoints')
            return

        # load units
        if not function.load_units():
            evtlog.log(
                "Couldn't load the units of function id='{0}'".format(
                    function.id),
                'evt_vnfd_itg_badsection_vdus')
            return

        # load interfaces of units
        if not function.load_unit_interfaces():
            evtlog.log(
                "Couldn't load unit interfaces of function id='{0}'".format(
                    function.id),
                'evt_vnfd_itg_vdu_badsection_cpoints')
            return

        # load function links
        if not function.load_virtual_links():
            evtlog.log(
                "Couldn't load the links of function id='{0}'".format(
                    function.id),
                'evt_vnfd_itg_badsection_vlinks')
            return

        # check for undeclared interfaces
        undeclared = function.find_undeclared_interfaces()
        if undeclared:
            evtlog.log(
                "Virtual links section has undeclared connection "
                "points: {0}".format(undeclared),
                'evt_vnfd_itg_undeclared_cpoint')
            return

        # check for unused interfaces (warning only, does not abort)
        unused_ifaces = function.find_unused_interfaces()
        if unused_ifaces:
            evtlog.log(
                "Function has unused connection points: {0}".format(
                    unused_ifaces),
                'evt_vnfd_itg_unused_cpoint')

        # verify integrity between unit interfaces and units
        for lid, link in function.links.items():
            for iface in link.interfaces:
                iface_tokens = iface.split(':')
                if len(iface_tokens) > 1:
                    if iface_tokens[0] not in function.units.keys():
                        evtlog.log(
                            "Invalid interface id='{0}' of link id='{1}'"
                            ": Unit id='{2}' is not defined".format(
                                iface, lid, iface_tokens[0]),
                            'evt_vnfd_itg_undefined_cpoint')
                        return

        return True

    def _validate_service_topology(self, service):
        """
        Validate the network topology of a service.
        :param service: service to validate
        :return: True if topology doesn't present issues, None otherwise
        """
        log.info("Validating topology of service '{0}'".format(service.id))

        # build service topology graph with VNF interfaces
        service.graph = service.build_topology_graph(level=1, bridges=False)
        if not service.graph:
            evtlog.log(
                "Couldn't build topology graph of service '{0}'".format(
                    service.id),
                'evt_nsd_top_topgraph_failed')
            return

        log.debug("Built topology graph of service '{0}': {1}".format(
            service.id, service.graph.edges()))

        # write service graphs with different levels and options
        self.write_service_graphs(service)

        if nx.is_connected(service.graph):
            log.debug("Topology graph of service '{0}' is connected".format(
                service.id))
        else:
            evtlog.log(
                "Topology graph of service '{0}' is disconnected".format(
                    service.id),
                'evt_nsd_top_topgraph_disconnected')

        # load forwarding paths
        if not service.load_forwarding_paths():
            evtlog.log(
                "Couldn't load service forwarding paths. "
                "Aborting validation.",
                'evt_nsd_top_badsection_fwgraph')
            return

        # analyse forwarding paths
        for fpid, fw_path in service.fw_paths.items():
            log.debug("Building forwarding path id='{0}'".format(fpid))

            # check if number of connection points is odd
            if len(fw_path) % 2 != 0:
                evtlog.log(
                    "The forwarding path id='{0}' has an odd number "
                    "of connection points".format(fpid),
                    'evt_nsd_top_fwgraph_cpoints_odd')

            trace = service.trace_path(fw_path)
            if 'BREAK' in trace:
                evtlog.log(
                    "The forwarding path id='{0}' is invalid for the "
                    "specified topology. {1} breakpoint(s) "
                    "found the path: {2}".format(fpid,
                                                 trace.count('BREAK'),
                                                 trace),
                    'evt_nsd_top_fwpath_invalid')
                # skip further analysis on this path
                continue

            log.debug("Forwarding path id='{0}': {1}".format(fpid, trace))

            # path is valid in specified topology, let's check for cycles
            fpg = nx.Graph()
            fpg.add_path(trace)
            cycles = Validator._find_graph_cycles(fpg, fpg.nodes()[0])
            if cycles and len(cycles) > 0:
                evtlog.log(
                    "Found cycles forwarding path id={0}: {1}".format(
                        fpid, cycles),
                    'evt_nsd_top_fwpath_cycles')

        return True

    def _validate_function_topology(self, function):
        """
        Validate the network topology of a function.
        It builds the topology graph of the function, including VDU
        connections.
        :param function: function to validate
        :return: True if topology doesn't present issues, None otherwise
        """
        log.info("Validating topology of function '{0}'".format(
            function.id))

        # build function topology graph
        function.graph = function.build_topology_graph(bridges=True)
        if not function.graph:
            evtlog.log(
                "Couldn't build topology graph of function '{0}'".format(
                    function.id),
                'evt_vnfd_top_topgraph_failed')
            return

        log.debug("Built topology graph of function '{0}': {1}".format(
            function.id, function.graph.edges()))

        # NOTE: cycle detection on the function graph is intentionally
        # disabled (bridge links typically form legitimate cycles)
        return True

    def _load_service_functions(self, service):
        """
        Loads and stores functions (VNFs) referenced in the specified service
        :param service: service
        :return: True if successful, None otherwise
        """
        log.debug("Loading functions of the service.")

        # get VNFD file list from provided dpath
        vnfd_files = list_files(self._dpath, self._dext)
        log.debug("Found {0} descriptors in dpath='{2}': {1}".format(
            len(vnfd_files), vnfd_files, self._dpath))

        # load all VNFDs
        path_vnfs = read_descriptor_files(vnfd_files)

        # check for errors
        if 'network_functions' not in service.content:
            log.error("Service doesn't have any functions. "
                      "Missing 'network_functions' section.")
            return

        functions = service.content['network_functions']
        if functions and not path_vnfs:
            log.error("Service references VNFs but none could be found in "
                      "'{0}'. Please specify another '--dpath'".format(
                          self._dpath))
            return

        # store function descriptors referenced in the service
        for function in functions:
            fid = build_descriptor_id(function['vnf_vendor'],
                                      function['vnf_name'],
                                      function['vnf_version'])
            if fid not in path_vnfs.keys():
                log.error("Referenced function descriptor id='{0}' couldn't "
                          "be loaded".format(fid))
                return

            vnf_id = function['vnf_id']
            new_func = self._storage.create_function(path_vnfs[fid])
            service.associate_function(new_func, vnf_id)

        return True

    @staticmethod
    def _load_project_service_file(project):
        """
        Load descriptors from a SONATA SDK project.
        :param project: SDK project
        :return: the NSD filename if successful, False otherwise
        """
        # load project service descriptor (NSD)
        nsd_files = project.get_ns_descriptor()
        if not nsd_files:
            # BUG FIX: placeholder was '[0}' (broken format field)
            evtlog.log(
                "Couldn't find a service descriptor in project '{0}'".format(
                    project.project_root),
                'evt_project_service_invalid')
            return False

        if len(nsd_files) > 1:
            evtlog.log(
                "Found multiple service descriptors in project "
                "'{0}': {1}".format(project.project_root, nsd_files),
                'evt_project_service_multiple')
            return False

        return nsd_files[0]

    @staticmethod
    def _find_graph_cycles(graph, node, prev_node=None, backtrace=None):
        """
        Depth-first search for a cycle in 'graph' starting at 'node'.
        :return: the cycle (list of nodes), the visited backtrace, or None
        """
        if not backtrace:
            backtrace = []

        # get node's neighbors
        neighbors = graph.neighbors(node)

        # remove previous node from neighbors
        if prev_node:
            neighbors.pop(neighbors.index(prev_node))

        # ensure node has neighbors
        if not len(neighbors) > 0:
            return None

        # check if this node was already visited
        if node in backtrace:
            cycle = backtrace[backtrace.index(node):]
            return cycle

        # mark this node as visited and trace it
        backtrace.append(node)

        # NOTE(review): only the FIRST neighbor is ever explored — the loop
        # returns on its first iteration. Kept as-is to preserve behavior;
        # confirm whether all branches should be searched.
        for neighbor in neighbors:
            return Validator._find_graph_cycles(graph, neighbor,
                                                prev_node=node,
                                                backtrace=backtrace)
        return backtrace

    def write_service_graphs(self, service):
        """
        Write GraphML topology graphs of the service (levels 0-3, with and
        without bridges) into the 'graphs' directory.
        :param service: service whose topology graphs are written
        """
        graphsdir = 'graphs'
        # BUG FIX: replaced try/except that silently swallowed every OSError
        # (missing re-raise for non-EEXIST errors) and removed a dead
        # level-3 graph build whose result was immediately overwritten
        os.makedirs(graphsdir, exist_ok=True)

        for lvl in range(0, 4):
            g = service.build_topology_graph(level=lvl, bridges=False)
            nx.write_graphml(
                g, os.path.join(graphsdir,
                                "{0}-lvl{1}.graphml".format(service.id, lvl)))
            g = service.build_topology_graph(level=lvl, bridges=True)
            nx.write_graphml(
                g, os.path.join(graphsdir,
                                "{0}-lvl{1}-br.graphml".format(service.id,
                                                               lvl)))
class Packager(object): def __init__(self, workspace, project=None, services=None, functions=None, dst_path=None, generate_pd=True, version="1.0"): # Assign parameters coloredlogs.install(level=workspace.log_level) self._version = version self._package_descriptor = None self._workspace = workspace self._project = project self._services = services self._functions = functions # Create a son-access client self._access = AccessClient(self._workspace, log_level=self._workspace.log_level) # Create a validator self._validator = Validator(workspace=workspace) self._validator.configure(syntax=True, integrity=False, topology=False) # Create a schema validator self._schema_validator = SchemaValidator(workspace) # Keep track of VNF packaging referenced in NS self._ns_vnf_registry = {} # location to write the package self._dst_path = dst_path if dst_path else '.' # temporary working directory self._workdir = '.package-' + str(time.time()) # Specifies THE service template of this package self._entry_service_template = None # Keep a list of repositories and # catalogue servers that this package depend on. # This will be included in the Package Resolver Section self._package_resolvers = [] # Keep a list of external artifact # dependencies that this package depends up on # This will be included in the Artifact Dependencies Section self._artifact_dependencies = [] # States if this package is self-contained, # i.e. if contains all its relevant artifacts self._sealed = True # Clear and create package specific folder if generate_pd: self.init_package_skeleton() self.build_package() def init_package_skeleton(self): """ Validate and initialize the destination folder for the creation of the package artifacts. """ if os.path.isdir(self._workdir): log.error("Internal error. 
Temporary workdir already exists.") return # workdir os.mkdir(self._workdir) atexit.register(shutil.rmtree, os.path.abspath(self._workdir)) # destination path if not os.path.isdir(self._dst_path): os.mkdir(self._dst_path) @property def package_descriptor(self): return self._package_descriptor def build_package(self): """ Create and set the full package descriptor as a dictionary. It process the file by each individual section. """ log.info('Create Package Content Section') package_content = self.package_pcs() log.info('Create Package Resolver Section') package_resolver = self.package_prs() log.info('Create Package Dependencies Section') package_dependencies = self.package_pds() log.info('Create Artifact Dependencies Section') artifact_dependencies = self.package_ads() # The general section must be created last, # some fields depend on prior processing log.info('Create General Description section') if self._project: general_description = self.package_gds( prj_descriptor=self._project.project_config) else: general_description = self.package_gds() if not general_description: log.error("Failed to package General Description Section.") return # Compile all sections in package descriptor self._package_descriptor = general_description if not package_content: log.error("Failed to package Package Content Section. 
" "Could not find a network service and/or its " "referenced function descriptors") self._package_descriptor = None return self._package_descriptor.update(package_content) self._package_descriptor.update(package_resolver) self._package_descriptor.update(package_dependencies) self._package_descriptor.update(artifact_dependencies) # Create the manifest folder and file meta_inf = os.path.join(self._workdir, "META-INF") os.makedirs(meta_inf, exist_ok=True) with open(os.path.join(meta_inf, "MANIFEST.MF"), "w") as manifest: manifest.write(yaml.dump(self.package_descriptor, default_flow_style=False)) @performance def package_gds(self, prj_descriptor=None): """ Compile information for the General Description Section. This section is exclusively filled by the project descriptor file located on the root of every project. """ # List of mandatory fields to be included in the GDS gds_fields = ['vendor', 'name', 'version', 'maintainer', 'description'] gds = dict() gds['descriptor_version'] = self._version gds['schema'] = self._schema_validator.get_remote_schema( SchemaValidator.SCHEMA_PACKAGE_DESCRIPTOR) gds['sealed'] = self._sealed if prj_descriptor: gds['entry_service_template'] = self._entry_service_template if 'package' not in prj_descriptor.keys(): log.error("Please define 'package' section in {}" .format(Project.__descriptor_name__)) return errors = [] for field in gds_fields: if field not in prj_descriptor['package'].keys(): errors.append(field) else: gds[field] = prj_descriptor['package'][field] if errors: log.error('Please define {} in the package section of {}' .format(', '.join(errors), Project.__descriptor_name__)) return else: # TODO: what properties to set in a custom package? TBD... gds['vendor'] = 'custom' gds['name'] = 'package' gds['version'] = '1.0' gds['maintainer'] = 'developer' gds['description'] = 'custom generated package' return gds @performance def package_pcs(self): """ Compile information for the Package Content Section. 
This section contains all the artifacts that are contained and shipped by the package. """ pcs = [] # Load and add service descriptor if self._project: nsd = self.generate_project_nsd() if not nsd or len(nsd) == 0: log.error("Failed to package service descriptor") return pcs += nsd elif self._services: nsds = self.generate_custom_nsds() if not nsds: log.error("Failed to package service descriptors") return pcs += nsds # Load and add the function descriptors if self._project: vnfds = self.generate_project_vnfds() if not vnfds or len(vnfds) == 0: log.error("Failed to package function descriptors") return pcs += vnfds elif self._functions: vnfds = self.generate_custom_vnfds() if not vnfds: log.error("Failed to package function descriptors") return pcs += vnfds return dict(package_content=pcs) @performance def package_prs(self): """ Compile information for the Package Resolver Section. This section contains information about catalogues and repositories needed to resolve the dependencies specified in this package descriptor. """ if len(self._package_resolvers) == 0: log.debug("There are no required Package Resolvers. " "This section will not be included.") return dict() return dict(package_resolvers=self._package_resolvers) @performance def package_pds(self): """ Compile information for the Package Dependencies Section. This section specifies additional packages that this package depends up on. """ log.debug("There are no required Package Dependencies. " "This section will not be included.") return dict() @performance def package_ads(self): """ Compile information for the Artifact Dependencies Section. This section contains components that are not included in the package but are referenced in its descriptors. For instance, it includes the url of vm_images used by network functions. """ if len(self._artifact_dependencies) == 0: log.debug("There are no required Artifact Dependencies. 
" "This section will not be included.") return dict() return dict(artifact_dependencies=self._artifact_dependencies) def generate_project_nsd(self): """ Compile information for the service descriptor section. """ base_path = os.path.join(self._project.project_root, 'sources', 'nsd') if not os.path.isdir(base_path): log.error("Missing NS directory '{}'".format(base_path)) return # Ensure that only one NS descriptor exists nsd_list = [file for file in os.listdir(base_path) if os.path.isfile(os.path.join(base_path, file)) and file.endswith(self._project.descriptor_extension)] check = len(nsd_list) if check == 0: log.error("Missing NS Descriptor file.") return elif check > 1: log.error("Only one NS Descriptor file is allowed.") return else: nsd_filename = nsd_list[0] with open(os.path.join(base_path, nsd_filename), 'r') as _file: nsd = yaml.load(_file) # Validate NSD log.debug("Validating Service Descriptor NSD='{}'" .format(nsd_filename)) if not self._validator.validate_service(os.path.join(base_path, nsd_filename)): log.error("Failed to validate Service Descriptor '{}'. 
" "Aborting package creation".format(nsd_filename)) return # Cycle through VNFs and register their IDs for later dependency check if 'network_functions' in nsd: vnf_list = \ [vnf for vnf in nsd['network_functions'] if vnf['vnf_name']] for vnf in vnf_list: self.register_ns_vnf(get_vnf_id_full(vnf['vnf_vendor'], vnf['vnf_name'], vnf['vnf_version'])) # Create SD location nsd = os.path.join(base_path, nsd_filename) sd_path = os.path.join(self._workdir, "service_descriptors") os.makedirs(sd_path, exist_ok=True) # Copy service descriptor file sd = os.path.join(sd_path, nsd_filename) self.copy_descriptor_file(nsd, sd) # Generate NSD package content entry pce = [] pce_sd = dict() pce_sd["content-type"] = "application/sonata.service_descriptor" pce_sd["name"] = "/service_descriptors/{}".format(nsd_filename) pce_sd["md5"] = generate_hash(sd) pce.append(pce_sd) # Specify the NSD as THE entry service template of package descriptor self._entry_service_template = pce_sd['name'] return pce def generate_custom_nsds(self): """ Compile information for the service descriptors, when creating a custom package. 
""" log.info("Packaging service descriptors...") for nsd_filename in self._services: if not self._validator.validate_service(nsd_filename): log.error("Failed to package service '{}'" .format(nsd_filename)) return # Create SD location sd_path = os.path.join(self._workdir, "service_descriptors") os.makedirs(sd_path, exist_ok=True) # Copy service descriptors and generate their entry points pce = [] for nsd_filename in self._services: nsd_basename = os.path.basename(nsd_filename) sd = os.path.join(sd_path, nsd_basename) self.copy_descriptor_file(nsd_filename, sd) pce_sd = dict() pce_sd["content-type"] = "application/sonata.service_descriptor" pce_sd["name"] = "/service_descriptors/{}".format(nsd_basename) pce_sd["md5"] = generate_hash(sd) pce.append(pce_sd) return pce def generate_project_vnfds(self): """ Compile information for the function descriptors, when packaging an SDK project. """ # Add VNFs from project source log.info("Packaging VNF descriptors from project source...") pcs = self.generate_project_source_vnfds(os.path.join( self._project.project_root, 'sources', 'vnf')) # Verify that all VNFs from NSD were packaged unpack_vnfs = self.get_unpackaged_ns_vnfs() if len(unpack_vnfs) > 0: # Load function descriptors (VNFDs) from external sources log.info("Solving dependencies for VNF descriptors...") if not self.load_external_vnfds(unpack_vnfs): log.error("Unable to solve all dependencies " "required by the service descriptor.") return log.info("Packaging VNF descriptors from external source...") pcs_ext = self.generate_external_vnfds(os.path.join( self._workspace.workspace_root, self._workspace.vnf_catalogue_dir), unpack_vnfs) if not pcs_ext or len(pcs_ext) == 0: return pcs += pcs_ext # Verify again if all VNFs were correctly packaged unpack_vnfs = self.get_unpackaged_ns_vnfs() if len(unpack_vnfs) > 0: log.error("Unable to validate all VNFs " "required by the service descriptor.") return return pcs def generate_custom_vnfds(self): """ Compile information for the 
function descriptors, when creating a custom package. """ log.info("Packaging VNF descriptors...") for vnfd_filename in self._functions: if not self._validator.validate_function(vnfd_filename): log.error("Failed to package function '{}'" .format(vnfd_filename)) return # Create FD location sd_path = os.path.join(self._workdir, "function_descriptors") os.makedirs(sd_path, exist_ok=True) # Copy function descriptors and generate their entry points pce = [] for vnfd_filename in self._functions: vnfd_basename = os.path.basename(vnfd_filename) sd = os.path.join(sd_path, vnfd_basename) self.copy_descriptor_file(vnfd_filename, sd) pce_sd = dict() pce_sd["content-type"] = "application/sonata.function_descriptor" pce_sd["name"] = "/service_descriptors/{}".format(vnfd_basename) pce_sd["md5"] = generate_hash(sd) pce.append(pce_sd) return pce def load_external_vnfds(self, vnf_id_list): """ This method is responsible to load all VNFs, required by the NS, that are not part of project source. VNFs can be loaded from the Workspace catalog or/and from the catalogue servers. :param vnf_id_list: List of VNF ID to solve :return: True for success, False for failure """ log.debug("Loading the following VNF descriptors: {}" .format(vnf_id_list)) # Iterate through the VNFs required by the NS for vnf_id in vnf_id_list: log.debug("Probing workspace catalogue for VNF id='{}'..." .format(vnf_id)) # >> First, check if this VNF is in the workspace catalogue catalogue_path = os.path.join( self._workspace.workspace_root, self._workspace.vnf_catalogue_dir, vnf_id) if os.path.isdir(catalogue_path): # Exists! Save catalogue path of this vnf for later packaging log.debug("Found VNF id='{}' in workspace catalogue '{}'" .format(vnf_id, catalogue_path)) continue log.debug("VNF id='{}' is not present in workspace catalogue. 
" "Contacting SP Catalogue...".format(vnf_id)) # If not in WS catalogue, get the VNF from the SP Catalogues vnfd = None self.retrieve_external_vnf(vnf_id) if not vnfd: log.warning("VNF id='{}' is not present in SP Catalogue" .format(vnf_id)) return False # Create dir to hold the retrieved VNF in workspace catalogue log.debug("VNF id='{}' retrieved from the SP Catalogue. " "Loading to workspace cache.".format(vnf_id)) os.mkdir(catalogue_path) vnfd_f = open(os.path.join(catalogue_path, vnfd['name'] + "." + self._project.descriptor_extension), 'w') yaml.dump(vnfd, vnfd_f, default_flow_style=False) return True def generate_project_source_vnfds(self, base_path): """ Compile information for the list of VNFs This function iterates over the different VNF entries :param base_path: base dir location of VNF descriptors :return: """ vnf_folders = filter( lambda file: os.path.isdir(os.path.join(base_path, file)), os.listdir(base_path)) pcs = [] for vnf in vnf_folders: pc_entries = self.generate_vnfd_entry( os.path.join(base_path, vnf), vnf) if not pc_entries or len(pc_entries) == 0: continue for pce in iter(pc_entries): pcs.append(pce) return pcs def generate_external_vnfds(self, base_path, vnf_ids): vnf_folders = filter( lambda file: os.path.isdir(os.path.join(base_path, file)) and file in vnf_ids, os.listdir(base_path)) pcs = [] for vnf in vnf_folders: pc_entries = self.generate_vnfd_entry(os.path.join( base_path, vnf), vnf) if not pc_entries or len(pc_entries) == 0: continue for pce in iter(pc_entries): pcs.append(pce) return pcs def generate_vnfd_entry(self, base_path, vnf): """ Compile information for a specific VNF. The VNF descriptor is validated and added to the package.VDU image files, referenced in the VNF descriptor, are added to the package. :param base_path: The path where the VNF file is located :param vnf: The VNF reference path :return: The package content entries. 
""" # Locate VNFD vnfd_list = [file for file in os.listdir(base_path) if os.path.isfile(os.path.join(base_path, file)) and file.endswith(self._project.descriptor_extension)] # Validate number of Yaml files check = len(vnfd_list) if check == 0: log.warning("Missing VNF descriptor file in path '{}'. " "A descriptor with '{}' extension should be " "in this path" .format(base_path, self._project.descriptor_extension)) return elif check > 1: log.warning("Multiple YAML descriptors found in '{}'. " "Ignoring path.".format(os.path.basename(base_path))) return else: with open(os.path.join(base_path, vnfd_list[0]), 'r') as _file: vnfd = yaml.load(_file) vnfd_path = os.path.join(os.path.basename(base_path), vnfd_list[0]) # Validate VNFD log.debug("Validating VNF descriptor file='{}'".format(vnfd_path)) if not self._validator.validate_function(os.path.join(base_path, vnfd_list[0])): log.exception("Failed to validate VNF descriptor '{}'" .format(vnfd_path)) return # Check if this VNF exists in the ns_vnf registry. # If does not, cancel its packaging if not self.check_in_ns_vnf(get_vnf_id(vnfd)): log.warning("VNF id='{}' file='{}' is not referenced in the " "service descriptor. 
It will be excluded from " "the package" .format(get_vnf_id(vnfd), vnfd_path)) return pce = [] # Create fd location fd_path = os.path.join(self._workdir, "function_descriptors") os.makedirs(fd_path, exist_ok=True) # Copy the descriptor file fd = os.path.join(fd_path, vnfd_list[0]) self.copy_descriptor_file(os.path.join(base_path, vnfd_list[0]), fd) # Generate VNFD Entry pce_fd = dict() pce_fd["content-type"] = "application/sonata.function_descriptor" pce_fd["name"] = "/function_descriptors/{}".format(vnfd_list[0]) pce_fd["md5"] = generate_hash(fd) pce.append(pce_fd) if 'virtual_deployment_units' in vnfd: vdu_list = [vdu for vdu in vnfd['virtual_deployment_units'] if vdu['vm_image']] for vdu in vdu_list: # vm_image can be a local File, a local Dir, # a URL or a reference to docker image vdu_image_path = vdu['vm_image'] if validators.url(vdu_image_path): # Check if is URL/URI. try: # Check if the image URL exists with a short Timeout requests.head(vdu_image_path, timeout=1) except (requests.Timeout, requests.ConnectionError): log.warning("Failed to verify the " "existence of vm_image '{}'" .format(vdu['vm_image'])) # Add image URL to artifact dependencies self._add_artifact_dependency( name=vnfd['name'] + '-' + vdu['id'] + '-vm_image', vendor=vnfd['vendor'], version=vnfd['version'], url=vdu['vm_image'], md5='02236f2ae558018ed14b5222ef1bd9f1') # TODO: remote url must provide md5? This is dummy! continue else: # Check for URL local (e.g. file:///...) 
ptokens = pathlib.Path(vdu_image_path).parts if ptokens[0] == 'file:': # URL to local file bd = os.path.join(base_path, ptokens[1]) else: # regular filename/path bd = os.path.join(base_path, vdu['vm_image']) if os.path.exists(bd): # local File or local Dir if os.path.isfile(bd): pce.append(self.__pce_img_gen__( base_path, vnf, vdu, vdu['vm_image'], dir_p='', dir_o='')) elif os.path.isdir(bd): for root, dirs, files in os.walk(bd): dir_o = root[len(bd):] dir_p = dir_o.replace(os.path.sep, "/") for f in files: if dir_o.startswith(os.path.sep): dir_o = dir_o[1:] pce.append(self.__pce_img_gen__( root, vnf, vdu, f, dir_p=dir_p, dir_o=dir_o)) elif vdu['vm_image_format'] == 'docker': log.debug("Referenced vm_image is docker '{}'" .format(vdu['vm_image'])) return pce @staticmethod def copy_descriptor_file(src_descriptor, dst_descriptor): """ Copy a descriptor file. Instead of just copying the file, it parses and reads the content of the source file, then it creates a new file and writes in it the digested content. 
:param src_descriptor: :param dst_descriptor: :return: """ with open(src_descriptor, "r") as vnfd_file: vnf_content = yaml.load(vnfd_file) with open(dst_descriptor, "w") as vnfd_file: vnfd_file.write(yaml.dump(vnf_content, default_flow_style=False)) def __pce_img_gen__(self, bd, vnf, vdu, f, dir_p='', dir_o=''): pce = dict() img_format = 'raw' \ if not vdu['vm_image_format'] \ else vdu['vm_image_format'] pce["content-type"] = "application/sonata.{}_files".format(img_format) pce["name"] = "/{}_files/{}{}/{}".format(img_format, vnf, dir_p, f) pce["md5"] = self.__pce_img_gen_fc__(img_format, vnf, f, bd, dir_o) return pce def __pce_img_gen_fc__(self, img_format, vnf, f, root, dir_o=''): fd_path = os.path.join("{}_files".format(img_format), vnf, dir_o) fd_path = os.path.join(self._workdir, fd_path) os.makedirs(fd_path, exist_ok=True) fd = os.path.join(fd_path, f) shutil.copyfile(os.path.join(root, f), fd) return generate_hash(fd) def generate_package(self, name): """ Generate the final package version. :param name: The name of the final version of the package, the project name will be used if no name provided """ # Validate all needed information if not self._package_descriptor: log.critical("Missing package descriptor. " "Failed to generate package.") exit(1) if not name: name = self._package_descriptor['vendor'] + "." + \ self._package_descriptor['name'] + "." + \ self._package_descriptor['version'] # Generate package file zip_name = os.path.join(self._dst_path, name + '.son') with closing(zipfile.ZipFile(zip_name, 'w')) as pck: for base, dirs, files in os.walk(self._workdir): for file_name in files: full_path = os.path.join(base, file_name) relative_path = \ full_path[len(self._workdir) + len(os.sep):] if not full_path == zip_name: pck.write(full_path, relative_path) # Validate PD log.debug("Validating Package") if not self._validator.validate_package(zip_name): log.debug("Failed to validate Package Descriptor. 
" "Aborting package creation.") self._package_descriptor = None return package_md5 = generate_hash(zip_name) log.info("Package generated successfully.\nFile: {}\nMD5: {}\n" .format(os.path.abspath(zip_name), package_md5)) def register_ns_vnf(self, vnf_id): """ Add a vnf to the NS VNF registry. :param vnf_id: :return: True for successful registry. False if the VNF already exists in the registry. """ if vnf_id in self._ns_vnf_registry: return False self._ns_vnf_registry[vnf_id] = False return True def check_in_ns_vnf(self, vnf_id): """Marks a VNF as packaged in the SD VNF registry.""" if vnf_id not in self._ns_vnf_registry: return False self._ns_vnf_registry[vnf_id] = True return True def get_unpackaged_ns_vnfs(self): """ Obtain the a list of VNFs that were referenced by NS but weren't packaged. """ u_vnfs = [] for vnf in self._ns_vnf_registry: if not self._ns_vnf_registry[vnf]: u_vnfs.append(vnf) return u_vnfs def retrieve_external_vnf(self, descriptor_id): """ Retrieve descriptor from the service Platform catalogue. It will loop through available Service Plaforms to retrieve the required descriptor :return: descriptor content """ # first, contact the default platform vnfd = self._access.pull_resource('functions', identifier=descriptor_id, uuid=False) if vnfd: return vnfd # if not retrieved, loop through remaining platforms for platform, p_id in self._workspace.service_platforms.items(): # ignore default platform if p_id == self._workspace.default_service_platform: continue vnfd = self._access.pull_resource('functions', identifier=descriptor_id, uuid=False, platform_id=p_id) if vnfd: return vnfd def _add_package_resolver(self, name, username='******', password='******'): log.debug("Adding package resolver entry '{}'".format(name)) # Check if already included for pr_entry in self._package_resolvers: if pr_entry['name'] == name: log.debug("Package resolver entry '{}' " "was previously added. Ignoring." 
.format(name)) return pr_entry = {'name': name, 'credentials': { 'username': username, 'password': password }} self._package_resolvers.append(pr_entry) def _add_artifact_dependency(self, name, vendor, version, url, md5, username='******', password='******'): log.debug("Adding artifact dependency entry '{}'".format(name)) # Check if already included for ad_entry in self._artifact_dependencies: if ad_entry['name'] == name: log.debug("Artifact dependency entry '{}' " "was previously added. Ignoring." .format(name)) return ad_entry = {'name': name, 'vendor': vendor, 'version': version, 'url': url, 'md5': md5, 'credentials': { 'username': username, 'password': password }} self._artifact_dependencies.append(ad_entry) # Set package sealed to false as it will not be self-contained self._sealed = False
class Validator(object):
    """
    Validates SONATA packages, projects, services and functions at up to
    three incremental levels: syntax, integrity and network topology.
    """

    def __init__(self, workspace=None):
        """
        Initialize the Validator.
        A workspace may be provided for an easy parameter configuration,
        such as location and extension of descriptors, verbosity level, etc.

        :param workspace: SONATA workspace object
        """
        self._workspace = workspace
        # which validation levels to apply (see configure())
        self._syntax = True
        self._integrity = True
        self._topology = True

        # create "virtual" workspace if not provided (don't actually create
        # file structure)
        if not self._workspace:
            self._workspace = Workspace('.', log_level='info')

        # load configurations from workspace
        self._dext = self._workspace.default_descriptor_extension
        self._dpath = '.'
        self._log_level = self._workspace.log_level

        # configure logs
        coloredlogs.install(level=self._log_level)

        # descriptors storage
        self._storage = DescriptorStorage()

        # syntax validation
        self._schema_validator = SchemaValidator(self._workspace)

        # wrapper to count number of errors and warnings.
        # NOTE(review): this monkey-patches the module-level logger, so the
        # counters are shared by every Validator instance in the process.
        log.error = CountCalls(log.error)
        log.warning = CountCalls(log.warning)

    @property
    def error_count(self):
        """
        Provides the number of errors given during validation.
        """
        return log.error.counter

    @property
    def warning_count(self):
        """
        Provides the number of warnings given during validation.
        """
        return log.warning.counter

    def configure(self, syntax=None, integrity=None, topology=None,
                  dpath=None, dext=None, debug=False):
        """
        Configure parameters for validation. It is recommended to call this
        function before performing a validation.

        :param syntax: specifies whether to validate syntax
        :param integrity: specifies whether to validate integrity
        :param topology: specifies whether to validate network topology
        :param dpath: directory to search for function descriptors (VNFDs)
        :param dext: extension of descriptor files (default: 'yml')
        :param debug: increase verbosity level of logger
        """
        # assign parameters
        if syntax is not None:
            self._syntax = syntax
        if integrity is not None:
            self._integrity = integrity
        if topology is not None:
            self._topology = topology
        if dext is not None:
            self._dext = dext
        if dpath is not None:
            self._dpath = dpath
        if debug:
            coloredlogs.install(level='debug')

    def _assert_configuration(self):
        """
        Ensures that the current configuration is compatible with the
        validation to perform. If issues are found the application is
        interrupted with the appropriate error.
        This is an internal function which must be invoked only by:
        - 'validate_package'
        - 'validate_project'
        - 'validate_service'
        - 'validate_function'
        """
        # ensure this function is called by specific functions
        caller = inspect.stack()[1][3]
        if caller != 'validate_function' and caller != 'validate_service' and \
                caller != 'validate_project' and caller != 'validate_package':
            log.error("Cannot assert a correct configuration. Validation "
                      "scope couldn't be determined. Aborting")
            sys.exit(1)

        # general rules - apply to all validations
        if self._integrity and not self._syntax:
            log.error("Cannot validate integrity without validating syntax "
                      "first. Aborting.")
            sys.exit(1)

        if self._topology and not self._integrity:
            log.error("Cannot validate topology without validating integrity "
                      "first. Aborting.")
            sys.exit(1)

        if not self._syntax:
            log.error("Nothing to validate. Aborting.")
            sys.exit(1)

        if caller == 'validate_package':
            pass
        elif caller == 'validate_project':
            pass
        elif caller == 'validate_service':
            # check SERVICE validation parameters
            if (self._integrity or self._topology) and not \
                    (self._dpath and self._dext):
                log.critical("Invalid validation parameters. To validate the "
                             "integrity or topology of a service both "
                             "'--dpath' and '--dext' parameters must be "
                             "specified.")
                sys.exit(1)
        elif caller == 'validate_function':
            pass

    def validate_package(self, package):
        """
        Validate a SONATA package.
        By default, it performs the following validations: syntax, integrity
        and network topology.

        :param package: SONATA package filename
        :return: True if all validations were successful, False otherwise
        """
        self._assert_configuration()

        log.info("Validating package '{0}'".format(os.path.abspath(package)))

        # check if package is packed in the correct format
        if not zipfile.is_zipfile(package):
            log.error("Invalid SONATA package '{}'".format(package))
            return

        package_dir = '.' + str(time.time())
        with closing(zipfile.ZipFile(package, 'r')) as pkg:
            # extract package contents
            pkg.extractall(package_dir)

        # set folder for deletion when program exits
        atexit.register(shutil.rmtree, package_dir)

        # validate package file structure
        if not self._validate_package_struct(package_dir):
            return

        pd_filename = os.path.join(package_dir, 'META-INF', 'MANIFEST.MF')
        # from here on, 'package' is the package object, not the filename
        package = self._storage.create_package(pd_filename)

        if self._syntax and not self._validate_package_syntax(package):
            return

        if self._integrity and \
                not self._validate_package_integrity(package, package_dir):
            return

        return True

    def validate_project(self, project):
        """
        Validate a SONATA project.
        By default, it performs the following validations: syntax, integrity
        and network topology.

        :param project: SONATA project
        :return: True if all validations were successful, False otherwise
        """
        self._assert_configuration()

        log.info("Validating project '{0}'".format(project.project_root))
        log.info("... syntax: {0}, integrity: {1}, topology: {2}"
                 .format(self._syntax, self._integrity, self._topology))

        # retrieve project configuration
        self._dpath = project.vnfd_root
        self._dext = project.descriptor_extension

        # load all project descriptors present at source directory
        log.debug("Loading project service")
        # NOTE(review): _load_project_service_file returns False on failure;
        # that value is passed straight into validate_service here.
        nsd_file = Validator._load_project_service_file(project)

        return self.validate_service(nsd_file)

    def validate_service(self, nsd_file):
        """
        Validate a SONATA service.
        By default, it performs the following validations: syntax, integrity
        and network topology.

        :param nsd_file: service descriptor filename
        :return: True if all validations were successful, False otherwise
        """
        self._assert_configuration()

        log.info("Validating service '{0}'".format(nsd_file))
        log.info("... syntax: {0}, integrity: {1}, topology: {2}"
                 .format(self._syntax, self._integrity, self._topology))

        service = self._storage.create_service(nsd_file)
        if not service:
            log.error("Failed to read the service descriptor of file '{}'"
                      .format(nsd_file))
            return

        # validate service syntax
        if self._syntax and not self._validate_service_syntax(service):
            return

        if self._integrity and not self._validate_service_integrity(service):
            return

        if self._topology and not self._validate_service_topology(service):
            return

        return True

    def validate_function(self, vnfd_path):
        """
        Validate one or multiple SONATA functions (VNFs).
        By default, it performs the following validations: syntax, integrity
        and network topology.

        :param vnfd_path: function descriptor (VNFD) filename or
                          a directory to search for VNFDs
        :return: True if all validations were successful, False otherwise
        """
        self._assert_configuration()

        # validate multiple VNFs when a directory is provided
        if os.path.isdir(vnfd_path):
            log.info("Validating functions in path '{0}'".format(vnfd_path))

            vnfd_files = list_files(vnfd_path, self._dext)
            for vnfd_file in vnfd_files:
                if not self.validate_function(vnfd_file):
                    return
            return True

        log.info("Validating function '{0}'".format(vnfd_path))
        log.info("... syntax: {0}, integrity: {1}, topology: {2}"
                 .format(self._syntax, self._integrity, self._topology))

        function = self._storage.create_function(vnfd_path)
        if not function:
            log.critical("Couldn't store VNF of file '{0}'".format(vnfd_path))
            return

        if self._syntax and not self._validate_function_syntax(function):
            return

        if self._integrity and not self._validate_function_integrity(function):
            return

        if self._topology and not self._validate_function_topology(function):
            return

        return True

    def _validate_package_struct(self, package_dir):
        """
        Validate the file structure of a SONATA package.

        :param package_dir: directory of extracted package
        :return: True if successful, None otherwise
        """
        # validate directory 'META-INF'
        meta_dir = os.path.join(package_dir, 'META-INF')
        if not os.path.isdir(meta_dir):
            log.error("A directory named 'META-INF' must exist, "
                      "located at the root of the package")
            return

        if len(os.listdir(meta_dir)) > 1:
            log.error("The 'META-INF' directory must only contain the file "
                      "'MANIFEST.MF'")
            return

        if not os.path.exists(os.path.join(meta_dir, 'MANIFEST.MF')):
            log.error("A file named 'MANIFEST.MF' must exist in directory "
                      "'META-INF'")
            return

        # validate directory 'service_descriptors'
        services_dir = os.path.join(package_dir, 'service_descriptors')
        if os.path.isdir(services_dir):
            if len(os.listdir(services_dir)) == 0:
                log.error("The 'service_descriptors' directory must contain at"
                          " least one service descriptor file")
                return

        # validate directory 'function_descriptors'
        functions_dir = os.path.join(package_dir, 'function_descriptors')
        if os.path.isdir(functions_dir):
            if len(os.listdir(functions_dir)) == 0:
                log.error("The 'function_descriptors' directory must contain "
                          "at least one function descriptor file")
                return

        return True

    def _validate_package_syntax(self, package):
        """
        Validate the syntax of the package descriptor of a SONATA
        package against its schema.

        :param package: package object to validate
        :return: True if syntax is correct, None otherwise
        """
        log.info("Validating syntax of package descriptor '{0}'"
                 .format(package.id))
        if not self._schema_validator.validate(
                package.content, SchemaValidator.SCHEMA_PACKAGE_DESCRIPTOR):
            log.error("Invalid syntax in MANIFEST of package: '{0}'"
                      .format(package.id))
            return
        return True

    def _validate_service_syntax(self, service):
        """
        Validate the syntax of a service (NS) against its schema.

        :param service: service to validate
        :return: True if syntax is correct, None otherwise
        """
        log.info("Validating syntax of service '{0}'".format(service.id))
        if not self._schema_validator.validate(
                service.content, SchemaValidator.SCHEMA_SERVICE_DESCRIPTOR):
            log.error("Invalid syntax in service: '{0}'".format(service.id))
            return
        return True

    def _validate_function_syntax(self, function):
        """
        Validate the syntax of a function (VNF) against its schema.

        :param function: function to validate
        :return: True if syntax is correct, None otherwise
        """
        log.info("Validating syntax of function '{0}'".format(function.id))
        if not self._schema_validator.validate(
                function.content, SchemaValidator.SCHEMA_FUNCTION_DESCRIPTOR):
            log.error("Invalid syntax in function '{0}'".format(function.id))
            return
        return True

    def _validate_package_integrity(self, package, root_dir):
        """
        Validate the integrity of a package.
        It will validate the entry service of the package as well as its
        referenced functions.

        :param package: package object
        :param root_dir: directory of the extracted package contents
        :return: True if integrity is correct, None otherwise
        """
        log.info("Validating integrity of package '{0}'".format(package.id))

        # load referenced service descriptor files
        for f in package.descriptors:
            filename = os.path.join(root_dir, strip_root(f))
            log.debug("Verifying file '{0}'".format(f))
            if not os.path.isfile(filename):
                log.error("Referenced descriptor file '{0}' is not "
                          "packaged.".format(f))
                return

            gen_md5 = generate_hash(filename)
            manif_md5 = package.md5(strip_root(f))
            if manif_md5 and gen_md5 != manif_md5:
                log.warning("MD5 hash of file '{0}' is not equal to the "
                            "defined in package descriptor:\nGen MD5:\t{1}\n"
                            "MANIF MD5:\t{2}"
                            .format(f, gen_md5, manif_md5))

        # configure dpath for function referencing
        self.configure(dpath=os.path.join(root_dir, 'function_descriptors'))

        # finally, validate the package entry service file
        entry_service_file = os.path.join(
            root_dir, strip_root(package.entry_service_file))

        return self.validate_service(entry_service_file)

    def _validate_service_integrity(self, service):
        """
        Validate the integrity of a service (NS).
        It checks for inconsistencies in the identifiers of connection
        points, virtual links, etc.

        :param service: service to validate
        :return: True if integrity is correct, None otherwise
        """
        log.info("Validating integrity of service '{0}'".format(service.id))

        # get referenced function descriptors (VNFDs)
        if not self._load_service_functions(service):
            log.error("Failed to read service function descriptors")
            return

        # load service interfaces
        if not service.load_interfaces():
            log.error("Couldn't load the interfaces of service id='{0}'"
                      .format(service.id))
            return

        # load service links
        if not service.load_links():
            log.error("Couldn't load the links of service id='{0}'"
                      .format(service.id))
            return

        # verify integrity between vnf_ids and links
        for lid, link in service.links.items():
            for iface in link.iface_pair:
                if iface not in service.interfaces:
                    iface_tokens = iface.split(':')
                    if len(iface_tokens) != 2:
                        log.error("Connection point '{0}' in virtual link "
                                  "'{1}' is not defined"
                                  .format(iface, lid))
                        return
                    vnf_id = iface_tokens[0]
                    function = service.mapped_function(vnf_id)
                    if not function:
                        # BUG FIX: format fields previously repeated index 0
                        # ('{0}' twice) while three arguments were supplied,
                        # so the logged interface and link were wrong.
                        log.error("Function (VNF) of vnf_id='{0}' declared "
                                  "in connection point '{1}' in virtual link "
                                  "'{2}' is not defined"
                                  .format(vnf_id, iface, lid))
                        return

        # validate service function descriptors (VNFDs)
        for fid, function in service.functions.items():
            if not self.validate_function(function.filename):
                return

        return True

    def _validate_function_integrity(self, function):
        """
        Validate the integrity of a function (VNF).
        It checks for inconsistencies in the identifiers of connection
        points, virtual deployment units (VDUs), etc.

        :param function: function to validate
        :return: True if integrity is correct, None otherwise
        """
        log.info("Validating integrity of function descriptor '{0}'"
                 .format(function.id))

        # load function interfaces
        if not function.load_interfaces():
            log.error("Couldn't load the interfaces of function id='{0}'"
                      .format(function.id))
            return

        # load units
        if not function.load_units():
            log.error("Couldn't load the units of function id='{0}'"
                      .format(function.id))
            return

        # load interfaces of units
        if not function.load_unit_interfaces():
            log.error("Couldn't load unit interfaces of function id='{0}'"
                      .format(function.id))
            return

        # load function links
        if not function.load_links():
            log.error("Couldn't load the links of function id='{0}'"
                      .format(function.id))
            return

        # verify integrity between unit interfaces and units
        for lid, link in function.links.items():
            for iface in link.iface_pair:
                iface_tokens = iface.split(':')
                if len(iface_tokens) > 1:
                    if iface_tokens[0] not in function.units.keys():
                        log.error("Invalid interface id='{0}' of link id='{1}'"
                                  ": Unit id='{2}' is not defined"
                                  .format(iface, lid, iface_tokens[0]))
                        return
        return True

    def _validate_service_topology(self, service):
        """
        Validate the network topology of a service.

        :param service: service to validate
        :return: True if topology doesn't present fatal issues
        """
        log.info("Validating topology of service '{0}'".format(service.id))

        # build service topology graph
        service.build_topology_graph(deep=False, interfaces=True,
                                     link_type='e-line')
        log.debug("Built topology graph of service '{0}': {1}"
                  .format(service.id, service.graph.edges()))

        if nx.is_connected(service.graph):
            log.debug("Topology graph of service '{0}' is connected"
                      .format(service.id))
        else:
            log.warning("Topology graph of service '{0}' is disconnected"
                        .format(service.id))

        # load forwarding paths
        if not service.load_forwarding_paths():
            log.error("Couldn't load service forwarding paths")
            return

        # analyse forwarding paths
        for fpid, fw_path in service.fw_paths.items():
            trace = service.trace_path(fw_path)
            if 'BREAK' in trace:
                log.warning("The forwarding path id='{0}' is invalid for the "
                            "specified topology. {1} breakpoints were "
                            "found in the path: {2}"
                            .format(fpid, trace.count('BREAK'), trace))
                # skip further analysis on this path
                continue

            # path is valid in specified topology, let's check for cycles
            fpg = nx.Graph()
            # NOTE(review): Graph.add_path and nodes()[0] follow the
            # networkx 1.x API — verify against the pinned networkx version.
            fpg.add_path(trace)
            cycles = Validator._find_graph_cycles(fpg, fpg.nodes()[0])
            if cycles and len(cycles) > 0:
                log.warning("Found cycles forwarding path id={0}: {1}"
                            .format(fpid, cycles))

        # TODO: find a more coherent method to do this
        nx.write_graphml(service.graph, "{0}.graphml".format(service.id))

        return True

    def _validate_function_topology(self, function):
        """
        Validate the network topology of a function.
        It builds the topology graph of the function, including VDU
        connections.

        :param function: function to validate
        :return: True if topology doesn't present fatal issues
        """
        log.info("Validating topology of function '{0}'"
                 .format(function.id))

        # build function topology graph
        function.build_topology_graph(link_type='e-line')
        log.debug("Built topology graph of function '{0}': {1}"
                  .format(function.id, function.graph.edges()))

        # check for path cycles
        # NOTE(review): nodes()[0] follows the networkx 1.x API.
        cycles = Validator._find_graph_cycles(function.graph,
                                              function.graph.nodes()[0])
        if cycles and len(cycles) > 0:
            # BUG FIX: the second format field repeated index 0, printing
            # the function id twice and never the detected cycles.
            log.warning("Found cycles in network graph of function "
                        "'{0}':\n{1}".format(function.id, cycles))

        return True

    def _load_service_functions(self, service):
        """
        Loads and stores functions (VNFs) referenced in the specified
        service.

        :param service: service
        :return: True if successful, None otherwise
        """
        log.debug("Loading functions of the service.")

        # get VNFD file list from provided dpath
        vnfd_files = list_files(self._dpath, self._dext)
        log.debug("Found {0} descriptors in dpath='{2}': {1}"
                  .format(len(vnfd_files), vnfd_files, self._dpath))

        # load all VNFDs
        path_vnfs = read_descriptor_files(vnfd_files)

        # check for errors
        if 'network_functions' not in service.content:
            log.error("Service doesn't have any functions. "
                      "Missing 'network_functions' section.")
            return

        functions = service.content['network_functions']
        if functions and not path_vnfs:
            log.error("Service references VNFs but none could be found in "
                      "'{0}'. Please specify another '--dpath'"
                      .format(self._dpath))
            return

        # store function descriptors referenced in the service
        for function in functions:
            fid = build_descriptor_id(function['vnf_vendor'],
                                      function['vnf_name'],
                                      function['vnf_version'])
            if fid not in path_vnfs.keys():
                log.error("Referenced function descriptor id='{0}' couldn't "
                          "be found in path '{1}'".format(fid, self._dpath))
                return

            vnf_id = function['vnf_id']
            new_func = self._storage.create_function(path_vnfs[fid])
            service.associate_function(new_func, vnf_id)

        return True

    @staticmethod
    def _load_project_service_file(project):
        """
        Load the service descriptor (NSD) of a SONATA SDK project.

        :param project: SDK project
        :return: the NSD filename on success, False on failure
        """
        # load project service descriptor (NSD)
        nsd_files = project.get_ns_descriptor()
        if not nsd_files:
            # BUG FIX: the format field was mistyped as '[0}', which would
            # raise ValueError instead of logging the project root.
            log.critical("Couldn't find a service descriptor in project '{0}'"
                         .format(project.project_root))
            return False

        if len(nsd_files) > 1:
            log.critical("Found multiple service descriptors in project "
                         "'{0}': {1}"
                         .format(project.project_root, nsd_files))
            return False

        return nsd_files[0]

    @staticmethod
    def _find_graph_cycles(graph, node, prev_node=None, backtrace=None):
        """
        Depth-first walk looking for a cycle reachable from 'node'.

        :param graph: graph object whose neighbors() returns a list
        :param node: node to start the search from
        :param prev_node: node visited immediately before 'node'
        :param backtrace: accumulated path of visited nodes
        :return: the cycle as a list of nodes; the visited path if the walk
                 completes without revisiting a node; None at a dead end
        """
        if not backtrace:
            backtrace = []

        # get node's neighbors
        neighbors = graph.neighbors(node)

        # remove previous node from neighbors
        if prev_node:
            neighbors.pop(neighbors.index(prev_node))

        # ensure node has neighbors
        if not len(neighbors) > 0:
            return None

        # check if this node was already visited
        if node in backtrace:
            cycle = backtrace[backtrace.index(node):]
            return cycle

        # mark this node as visited and trace it
        backtrace.append(node)

        # iterate through neighbor nodes
        # TODO(review): this returns after exploring only the FIRST
        # neighbor, so sibling branches are never searched — confirm
        # whether that is intentional.
        for neighbor in neighbors:
            return Validator._find_graph_cycles(graph,
                                                neighbor,
                                                prev_node=node,
                                                backtrace=backtrace)
        return backtrace
class Publisher(object):
    """
    Publishes SDK project components (NSD/VNFD/package descriptors) to the
    catalogue servers configured in the workspace.

    NOTE(review): this class is defined twice in this file; the later
    definition shadows this one at import time -- the duplicate should be
    removed.
    """

    def __init__(self, workspace, project=None, component=None,
                 catalogue=None):
        """
        Initialize the Publisher.

        :param workspace: SONATA workspace object (log level and catalogue
            server configuration are read from it)
        :param project: SDK project whose components should be published
        :param component: a single component file to publish
        :param catalogue: ID of a specific catalogue server to publish to;
            when absent, all workspace catalogues marked publish='yes' are
            used
        """
        # Assign parameters
        coloredlogs.install(level=workspace.log_level)
        self._workspace = workspace
        self._project = project
        self._component = component
        self._catalogue = catalogue
        self._catalogue_clients = []

        # Instantiate catalogue clients
        self.create_catalogue_clients()

        # Create a schema validator
        self._schema_validator = SchemaValidator(workspace)

    def create_catalogue_clients(self):
        """
        Instantiate catalogue clients for the selected catalogue servers.

        Populates self._catalogue_clients; logs a warning and leaves it
        empty if no suitable catalogue is configured.
        :return:
        """
        log.debug("Creating catalogue clients...")

        # If catalogue argument was specified
        # ignore default publish catalogues
        if self._catalogue:
            # Get corresponding catalogue from workspace config
            cat = self._workspace.get_catalogue_server(self._catalogue)
            # FIX: removed stray debug statement "print(type(cat))" that
            # leaked to stdout
            if not cat:
                log.warning("The specified catalogue ID '{}' "
                            "does not exist in workspace configuration"
                            .format(self._catalogue))
                return
            # Instantiate catalogue client with the obtained address
            self._catalogue_clients.append(CatalogueClient(cat['url']))

        # If catalogue argument is absent -> get default publish catalogues
        else:
            # Get publish catalogues from workspace config
            for cat in self._workspace.catalogue_servers:
                if cat['publish'].lower() == 'yes':
                    self._catalogue_clients.append(
                        CatalogueClient(cat['url']))

        # Ensure there are catalogues available
        if not len(self._catalogue_clients) > 0:
            log.warning("There are no catalogue servers "
                        "configured for publishing")
            return

        log.debug("Added {} catalogue clients"
                  .format(len(self._catalogue_clients)))

    def publish_project(self):
        """
        Publish all components of a project to the available
        catalogue servers
        :return:
        """
        log.info("Publishing project: '{}'".format(self._project.project_root))

        # Ensure project was defined and its valid
        if not self._project or not Project.__is_valid__(self._project):
            log.error("Publish failed. Invalid or undefined project.")
            return

        # Retrieve project NSD and VNFDs files
        comp_list = self._project.get_ns_descriptor() + \
            self._project.get_vnf_descriptors()
        log.debug("The following project components "
                  "will be published: {}".format(comp_list))

        # Publish project components
        for comp in comp_list:
            self.publish_component(comp)

    def publish_component(self, filename=None):
        """
        Publish a single component file (e.g. descriptor)
        to the available catalogue servers

        :param filename: path of the descriptor file to publish; falls back
            to the component given at construction time
        :return: None; terminates the process with exit(1) if any
            catalogue rejects the component
        """
        # If filename parameter is absent, assume the component of object init
        if not filename:
            filename = self._component

        log.info("Publishing component: '{}'".format(filename))

        # Check if file exists
        if not os.path.isfile(filename):
            log.error("Publish failed. File '{}' does not exist."
                      .format(filename))
            return

        # Check that catalogue clients exist
        if not len(self._catalogue_clients) > 0:
            log.error("Publish failed. "
                      "There are no catalogue clients available.")
            return

        # Load component descriptor
        # NOTE(review): yaml.load without an explicit Loader can execute
        # arbitrary constructors on untrusted input -- consider
        # yaml.safe_load for descriptor files
        with open(filename, 'r') as compf:
            compd = yaml.load(compf)

        # Determine descriptor type of component
        descriptor_type = self._schema_validator.get_descriptor_type(compd)
        comp_data = yaml.dump(compd)

        # Publish to the catalogue servers based on the descriptor type
        errors_publishing = False
        for cat_client in self._catalogue_clients:
            if descriptor_type is SchemaValidator.SCHEMA_PACKAGE_DESCRIPTOR:
                log.debug("Publishing Package Descriptor: {}".format(filename))
                if not cat_client.post_pd(comp_data):
                    errors_publishing = True
            elif descriptor_type is SchemaValidator.SCHEMA_SERVICE_DESCRIPTOR:
                log.debug("Publishing Service Descriptor: {}".format(filename))
                if not cat_client.post_ns(comp_data):
                    errors_publishing = True
            elif descriptor_type is SchemaValidator.SCHEMA_FUNCTION_DESCRIPTOR:
                log.debug(
                    "Publishing Function Descriptor: {}".format(filename))
                if not cat_client.post_vnf(comp_data):
                    errors_publishing = True

        if errors_publishing:
            log.critical("The publishing of one or more components has "
                         "failed")
            # NOTE(review): sys.exit(1) is preferable to the site-provided
            # exit() builtin; kept as-is to avoid a new import
            exit(1)
class Publisher(object):
    """
    Publishes SDK project components (NSD/VNFD/package descriptors) to the
    catalogue servers configured in the workspace.

    NOTE(review): this is the second, duplicate definition of Publisher in
    this file; it shadows the earlier one at import time. The duplication
    should be resolved.
    """

    def __init__(self, workspace, project=None, component=None,
                 catalogue=None):
        """
        Initialize the Publisher.

        :param workspace: SONATA workspace object (log level and catalogue
            server configuration are read from it)
        :param project: SDK project whose components should be published
        :param component: a single component file to publish
        :param catalogue: ID of a specific catalogue server to publish to;
            when absent, all workspace catalogues marked publish='yes' are
            used
        """
        # Assign parameters
        coloredlogs.install(level=workspace.log_level)
        self._workspace = workspace
        self._project = project
        self._component = component
        self._catalogue = catalogue
        self._catalogue_clients = []

        # Instantiate catalogue clients
        self.create_catalogue_clients()

        # Create a schema validator
        self._schema_validator = SchemaValidator(workspace)

    def create_catalogue_clients(self):
        """
        Instantiate catalogue clients for the selected catalogue servers.

        Populates self._catalogue_clients; logs a warning and leaves it
        empty if no suitable catalogue is configured.
        :return:
        """
        log.debug("Creating catalogue clients...")

        # If catalogue argument was specified
        # ignore default publish catalogues
        if self._catalogue:
            # Get corresponding catalogue from workspace config
            cat = self._workspace.get_catalogue_server(self._catalogue)
            # FIX: removed stray debug statement "print(type(cat))" that
            # leaked to stdout
            if not cat:
                log.warning("The specified catalogue ID '{}' "
                            "does not exist in workspace configuration"
                            .format(self._catalogue))
                return
            # Instantiate catalogue client with the obtained address
            self._catalogue_clients.append(CatalogueClient(cat['url']))

        # If catalogue argument is absent -> get default publish catalogues
        else:
            # Get publish catalogues from workspace config
            for cat in self._workspace.catalogue_servers:
                if cat['publish'].lower() == 'yes':
                    self._catalogue_clients.append(
                        CatalogueClient(cat['url']))

        # Ensure there are catalogues available
        if not len(self._catalogue_clients) > 0:
            log.warning("There are no catalogue servers "
                        "configured for publishing")
            return

        log.debug("Added {} catalogue clients"
                  .format(len(self._catalogue_clients)))

    def publish_project(self):
        """
        Publish all components of a project to the available
        catalogue servers
        :return:
        """
        log.info("Publishing project: '{}'".format(self._project.project_root))

        # Ensure project was defined and its valid
        if not self._project or not Project.__is_valid__(self._project):
            log.error("Publish failed. Invalid or undefined project.")
            return

        # Retrieve project NSD and VNFDs files
        comp_list = self._project.get_ns_descriptor() + \
            self._project.get_vnf_descriptors()
        log.debug("The following project components "
                  "will be published: {}".format(comp_list))

        # Publish project components
        for comp in comp_list:
            self.publish_component(comp)

    def publish_component(self, filename=None):
        """
        Publish a single component file (e.g. descriptor)
        to the available catalogue servers

        :param filename: path of the descriptor file to publish; falls back
            to the component given at construction time
        :return: None; terminates the process with exit(1) if any
            catalogue rejects the component
        """
        # If filename parameter is absent, assume the component of object init
        if not filename:
            filename = self._component

        log.info("Publishing component: '{}'".format(filename))

        # Check if file exists
        if not os.path.isfile(filename):
            log.error("Publish failed. File '{}' does not exist."
                      .format(filename))
            return

        # Check that catalogue clients exist
        if not len(self._catalogue_clients) > 0:
            log.error("Publish failed. "
                      "There are no catalogue clients available.")
            return

        # Load component descriptor
        # NOTE(review): yaml.load without an explicit Loader can execute
        # arbitrary constructors on untrusted input -- consider
        # yaml.safe_load for descriptor files
        with open(filename, 'r') as compf:
            compd = yaml.load(compf)

        # Determine descriptor type of component
        descriptor_type = self._schema_validator.get_descriptor_type(compd)
        comp_data = yaml.dump(compd)

        # Publish to the catalogue servers based on the descriptor type
        errors_publishing = False
        for cat_client in self._catalogue_clients:
            if descriptor_type is SchemaValidator.SCHEMA_PACKAGE_DESCRIPTOR:
                log.debug("Publishing Package Descriptor: {}".format(filename))
                if not cat_client.post_pd(comp_data):
                    errors_publishing = True
            elif descriptor_type is SchemaValidator.SCHEMA_SERVICE_DESCRIPTOR:
                log.debug("Publishing Service Descriptor: {}"
                          .format(filename))
                if not cat_client.post_ns(comp_data):
                    errors_publishing = True
            elif descriptor_type is SchemaValidator.SCHEMA_FUNCTION_DESCRIPTOR:
                log.debug("Publishing Function Descriptor: {}"
                          .format(filename))
                if not cat_client.post_vnf(comp_data):
                    errors_publishing = True

        if errors_publishing:
            log.critical("The publishing of one or more components has "
                         "failed")
            # NOTE(review): sys.exit(1) is preferable to the site-provided
            # exit() builtin; kept as-is to avoid a new import
            exit(1)