def output_to_yaml(self, tosca):
    """Convert the translated TOSCA dict to a YAML string.

    Emits keys in a fixed, spec-friendly order (version, description,
    metadata, imports, type sections, topology template) using an
    OrderedDict plus a custom representer so yaml.dump preserves it.
    """
    self.log.debug(_('Converting translated output to yaml format.'))
    dict_output = OrderedDict()
    dict_output.update(
        {'tosca_definitions_version': tosca['tosca_definitions_version']})
    # Description is stored as-is.
    # NOTE(review): the original computed a textwrap'd "desc_str" (and a
    # "topo_list") here but never used them; that dead code was removed.
    if ToscaResource.DESC in tosca:
        dict_output.update({ToscaResource.DESC: tosca[ToscaResource.DESC]})
    if ToscaResource.METADATA in tosca:
        dict_output.update(
            {ToscaResource.METADATA: tosca[ToscaResource.METADATA]})
    if ToscaResource.IMPORT in tosca:
        dict_output.update(
            {ToscaResource.IMPORT: tosca[ToscaResource.IMPORT]})
    # Add all type sections, in a deterministic order.
    types_list = [
        ToscaResource.DATA_TYPES, ToscaResource.CAPABILITY_TYPES,
        ToscaResource.NODE_TYPES, ToscaResource.ARTIFACT_TYPES,
        ToscaResource.GROUP_TYPES, ToscaResource.POLICY_TYPES
    ]
    for typ in types_list:
        if typ in tosca:
            dict_output.update({typ: self.ordered_nodes(tosca[typ])})
    # Add topology template; substitution mappings use special ordering.
    if ToscaResource.TOPOLOGY_TMPL in tosca:
        tmpl = OrderedDict()
        for typ in tosca[ToscaResource.TOPOLOGY_TMPL]:
            if typ != ToscaResource.SUBSTITUTION_MAPPING:
                tmpl.update({
                    typ: self.ordered_nodes(
                        tosca[ToscaResource.TOPOLOGY_TMPL][typ])
                })
            else:
                tmpl.update({
                    typ: self.ordered_nodes_sub_mapping(
                        tosca[ToscaResource.TOPOLOGY_TMPL][typ])
                })
        dict_output.update({ToscaResource.TOPOLOGY_TMPL: tmpl})
    yaml.add_representer(OrderedDict, self.represent_ordereddict)
    yaml_string = yaml.dump(dict_output, default_flow_style=False)
    # get rid of the '' from yaml.dump around numbers
    yaml_string = yaml_string.replace('\'', '')
    self.log.debug(_("YAML output:\n{0}").format(yaml_string))
    return yaml_string
def process_mgmt_intf(intf):
    """Extract management-interface settings from *intf* into
    self.mgmt_intf (protocol, port, owning VDU, dashboard params).

    Raises ValidationError if a management interface was already seen.
    """
    if len(self.mgmt_intf) > 0:
        # Only one management interface is supported.
        # Fixed: the original *called* err_msg(...) instead of assigning
        # it, and referenced the non-existent attribute self.msmg_intf.
        err_msg = _("{0}, Already processed another mgmt intf {1}, "
                    "got another {2}").format(self, self.mgmt_intf, intf)
        self.log.error(err_msg)
        raise ValidationError(message=err_msg)
    self.mgmt_intf['protocol'] = 'tcp'
    if self.VNF_CONFIG in intf:
        process_vnf_config(intf.pop(self.VNF_CONFIG))
    if self.PORT in intf:
        self.mgmt_intf[self.PORT] = intf.pop(self.PORT)
        self.props[self.PORT] = self.mgmt_intf[self.PORT]
    if 'vdu_id' in intf:
        # Resolve the vdu_id reference to the VDU's TOSCA node name.
        for vdu in self.vdus:
            if intf['vdu_id'] == vdu.id:
                self.mgmt_intf[self.VDU] = vdu.get_name(self.name)
                intf.pop('vdu_id')
                break
    if self.DASHBOARD_PARAMS in intf:
        self.mgmt_intf[self.DASHBOARD_PARAMS] = \
            intf.pop(self.DASHBOARD_PARAMS)
    if len(intf):
        self.log.warn(
            _("{0}, Did not process all in mgmt "
              "interface {1}").format(self, intf))
    self.log.debug(
        _("{0}, Management interface: {1}").format(
            self, self.mgmt_intf))
def main(self, raw_args=None, log=None):
    """CLI entry point: parse args, validate input files, translate.

    Raises ValueError for a nonexistent input path or mixed file types.
    """
    args = self._parse_args(raw_args)
    if log is None:
        if args.debug:
            logging.basicConfig(level=logging.DEBUG)
        else:
            logging.basicConfig(level=logging.ERROR)
        log = logging.getLogger("yang-translator")
    log.debug(_("Args passed is {}").format(args))
    self.log = log
    self.in_files = []
    self.ftype = None
    for f in args.template_file:
        path = os.path.abspath(f[0])
        if not os.path.isfile(path):
            msg = _("The path %(path)s is not a valid file.") % {
                'path': path
            }
            log.error(msg)
            raise ValueError(msg)
        # Get the file type; all inputs must agree on it.
        ftype = self._get_file_type(path)
        if self.ftype is None:
            self.ftype = ftype
        elif self.ftype != ftype:
            # Typo fix: "hould" -> "should".
            msg = _("All input files should be of same type")
            log.error(msg)
            raise ValueError(msg)
        self.in_files.append(path)
    self.log.debug(_("Input files are of type {0}").format(self.ftype))
    self.archive = None
    self._translate(output_dir=args.output_dir, archive=args.archive)
def process_initial_config(dic):
    """Translate one initial-config entry into self.initial_cfg."""
    self.log.debug(_("{0}, initial config: {1}").format(self, dic))
    icp = {}
    for field in (self.NAME, self.SEQ, self.USER_DEF_SCRIPT):
        if field in dic:
            icp[field] = dic.pop(field)
    if self.PARAM in dic:
        params = []
        for entry in dic.pop(self.PARAM):
            if self.NAME in entry and self.VALUE in entry:
                params.append({self.NAME: entry[self.NAME],
                               self.VALUE: entry[self.VALUE]})
            else:
                # TODO (pjoseph): Need to add support to read the
                # config file and get the value from that
                self.log.warn(
                    _("{0}, Got parameter without value: {1}").format(
                        self, entry))
        if params:
            icp[self.PARAM] = params
    if dic:
        self.log.warn(
            _("{0}, Did not process all fields for {1}").format(
                self, dic))
    self.log.debug(_("{0}, Initial config {1}").format(self, icp))
    self.initial_cfg.append({self.PROPERTIES: icp})
def generate_vnf_template(self, tosca, index):
    """Generate the TOSCA node for this VNF (and its VDUs) into *tosca*.

    :param tosca: the template dict being built; returned after update.
    :param index: member-vnf-index used as the node's id property.
    """
    self.log.debug(
        _("{0}, Generate tosca template for VNF {1}").format(
            self, index, tosca))
    # Let each VDU add its own node templates first.
    for vdu in self.vdus:
        tosca = vdu.generate_vdu_template(tosca, self.name)
    node = {}
    node[self.TYPE] = self.T_VNF1
    # Remove fields not required in TOSCA
    self.props.pop(self.DESC)
    # Update index to the member-vnf-index
    self.props[self.ID] = index
    node[self.PROPERTIES] = self.props
    # Capabilities: mgmt interface, first HTTP endpoint, monitoring params.
    caps = {}
    if len(self.mgmt_intf):
        caps[self.MGMT_INTF] = {self.PROPERTIES: self.mgmt_intf}
    if len(self.http_ep):
        caps[self.HTTP_EP] = {self.PROPERTIES: self.http_ep[0]}
        if len(self.http_ep) > 1:
            self.log.warn(
                _("{0}: Currently only one HTTP endpoint "
                  "supported: {1}").format(self, self.http_ep))
    if len(self.mon_param):
        # Each monitoring param gets a uniquely numbered capability name.
        count = 0
        for monp in self.mon_param:
            name = "{}_{}".format(self.MON_PARAM, count)
            caps[name] = {self.PROPERTIES: monp}
            count += 1
    node[self.CAPABILITIES] = caps
    # Requirements point at the VDU node names generated above.
    if len(self.vdus):
        reqs = []
        for vdu in self.vdus:
            reqs.append({'vdus': {self.NODE: vdu.get_name(self.name)}})
        node[self.REQUIREMENTS] = reqs
    else:
        self.log.warn(
            _("{0}, Did not find any VDUS with this VNF").format(self))
    self.log.debug(_("{0}, VNF node: {1}").format(self, node))
    tosca[self.TOPOLOGY_TMPL][self.NODE_TMPL][self.name] = node
    return tosca
def set_cp_type(self, name, cp_type):
    """Set the CP_TYPE of the external connection point called *name*.

    Raises ValidationError if no such connection point exists.
    """
    for pos, entry in enumerate(self.ext_cp):
        if entry[self.NAME] != name:
            continue
        entry[self.CP_TYPE] = cp_type
        self.ext_cp[pos] = entry
        self.log.debug(
            _("{0}, Updated CP: {1}").format(self, self.ext_cp[pos]))
        return
    err_msg = (_("{0}, Did not find connection point {1}").format(
        self, name))
    self.log.error(err_msg)
    raise ValidationError(message=err_msg)
def set_cp_type(self, name, cp_type):
    """Update the type of the named external connection point.

    Raises ValidationError when the connection point is unknown.
    """
    idx = next((i for i, c in enumerate(self.ext_cp)
                if c[self.NAME] == name), None)
    if idx is not None:
        cp = self.ext_cp[idx]
        cp[self.CP_TYPE] = cp_type
        self.ext_cp[idx] = cp
        self.log.debug(_("{0}, Updated CP: {1}").
                       format(self, self.ext_cp[idx]))
        return
    err_msg = (_("{0}, Did not find connection point {1}").
               format(self, name))
    self.log.error(err_msg)
    raise ValidationError(message=err_msg)
def _translate_yang(self):
    """Translate loaded NSD/VNFD yang descriptors into TOSCA resources.

    NSD node names are normalized (spaces -> underscores, ensure an
    '_nsd' suffix); duplicate VNFD names are skipped.  VNFDs are handled
    before NSDs, since NSD handling references the VNFD resources.
    """
    self.log.debug(_('Translating the descriptors.'))
    if self.NSD in self.yangs:
        for nsd in self.yangs[self.NSD]:
            self.log.debug(
                _("Translate descriptor of type nsd: {}").format(nsd))
            # Normalize the node name and make sure it ends with 'nsd'.
            node_name = nsd.pop(ToscaResource.NAME).replace(' ', '_')
            node_name = node_name if node_name.endswith(
                'nsd') else ''.join([node_name, '_nsd'])
            tosca_node = TranslateDescriptors. \
                YANG_TO_TOSCA_TYPE[self.NSD](
                    self.log,
                    node_name,
                    self.NSD,
                    nsd,
                    self.vnfd_files)
            self.tosca_resources.append(tosca_node)
    vnfd_name_list = []
    if self.VNFD in self.yangs:
        for vnfd in self.yangs[self.VNFD]:
            # Skip VNFDs whose name was already translated.
            if vnfd['name'] not in vnfd_name_list:
                self.log.debug(
                    _("Translate descriptor of type vnfd: {}").format(
                        vnfd))
                vnfd_name_list.append(vnfd['name'])
                tosca_node = TranslateDescriptors. \
                    YANG_TO_TOSCA_TYPE[self.VNFD](
                        self.log,
                        vnfd.pop(ToscaResource.NAME),
                        self.VNFD,
                        vnfd)
                self.tosca_resources.append(tosca_node)
    # First translate VNFDs
    for node in self.tosca_resources:
        if node.type == self.VNFD:
            self.log.debug(
                _("Handle yang for {0} of type {1}").format(
                    node.name, node.type_))
            node.handle_yang()
    # Now translate NSDs
    for node in self.tosca_resources:
        if node.type == self.NSD:
            self.log.debug(
                _("Handle yang for {0} of type {1}").format(
                    node.name, node.type_))
            node.handle_yang(self.tosca_resources)
    return self.tosca_resources
def process_http_ep(eps):
    """Convert yang http-endpoint entries into self.http_ep entries."""
    self.log.debug("{}, HTTP EP: {}".format(self, eps))
    for ep in eps:
        entry = {'protocol': 'http'}  # Required for TOSCA
        entry[self.PATH] = ep.pop(self.PATH)
        entry[self.PORT] = ep.pop(self.PORT)
        entry[self.POLL_INTVL] = ep.pop(self.POLL_INTVL_SECS)
        if ep:
            self.log.warn(
                _("{0}, Did not process the following for "
                  "http ep {1}").format(self, ep))
        self.log.debug(
            _("{0}, http endpoint: {1}").format(self, entry))
        self.http_ep.append(entry)
def process_input_param(param):
    """Record an NSD input parameter (identified by xpath) in self.inputs."""
    if self.XPATH in param:
        val = param.pop(self.XPATH)
        # Strip namespace, catalog and nsd part
        self.inputs.append({
            self.NAME: self.map_yang_name_to_tosca(
                val.replace('/nsd:nsd-catalog/nsd:nsd/nsd:', ''))
        })
        # Log only when something was appended.  The original read
        # self.inputs[-1] unconditionally, which raised IndexError for a
        # parameter without an xpath when no inputs existed yet (and
        # logged an unrelated earlier input otherwise).
        self.log.debug(_("{0}, inputs: {1}").format(self, self.inputs[-1]))
    if len(param):
        self.log.warn(
            _("{0}, Did not process the following for "
              "input param {1}: {2}").format(self, self.inputs, param))
def _generate_type_map(log):
    '''Generate YANG translation types map.

    Load user defined classes from location path specified in conf file.
    Base classes are located within the yang directory.
    '''
    # Base types directory
    BASE_PATH = 'rift/mano/yang_translator/rwmano/yang'
    # Custom types directory defined in conf file
    custom_path = translatorConfig.get_value('DEFAULT',
                                             'custom_types_location')
    # First need to load the parent module, for example 'contrib.mano',
    # for all of the dynamically loaded classes.
    classes = []
    TranslateDescriptors._load_classes(log,
                                       (BASE_PATH, custom_path),
                                       classes)
    try:
        types_map = {clazz.yangtype: clazz for clazz in classes}
        log.debug(_("Type maps loaded: {}").format(types_map.keys()))
    except AttributeError as e:
        # Python 3 exceptions have no .message attribute (the original
        # used e.message, which itself raised AttributeError); str(e) is
        # the portable way to obtain the description.
        raise YangClassAttributeError(message=str(e))
    return types_map
def get_supporting_files(self):
    """Collect supporting files from VNFDs and initial-config scripts."""
    files = []
    for vnfd in self.vnfds.values():
        vnfd_files = vnfd.get_supporting_files()
        if vnfd_files and len(vnfd_files):
            files.extend(vnfd_files)
    # Get the config files for initial config
    for icp in self.initial_cfg:
        if self.USER_DEF_SCRIPT not in icp:
            continue
        script = os.path.basename(icp[self.USER_DEF_SCRIPT])
        files.append({
            self.TYPE: 'script',
            self.NAME: script,
            self.DEST: "{}/{}".format(self.SCRIPT_DIR, script),
        })
    # TODO (pjoseph): Add support for config scripts,
    # charms, etc
    self.log.debug(_("{0}, supporting files: {1}").format(self, files))
    return files
def generate_tosca_type(self, tosca):
    """Add the custom artifact, VDU and CP TOSCA types to *tosca*.

    Each type is only added when not already present; returns *tosca*.
    """
    # Typo fix in the debug message: "tosa" -> "tosca".
    self.log.debug(_("{0} Generate tosca types").format(self, tosca))
    # Add custom artifact type
    if self.ARTIFACT_TYPES not in tosca:
        tosca[self.ARTIFACT_TYPES] = {}
    if self.T_ARTF_QCOW2 not in tosca[self.ARTIFACT_TYPES]:
        tosca[self.ARTIFACT_TYPES][self.T_ARTF_QCOW2] = {
            self.DERIVED_FROM: 'tosca.artifacts.Deployment.Image.VM.QCOW2',
            self.IMAGE_CHKSUM: {
                self.TYPE: self.STRING,
                self.REQUIRED: self.NO
            },
        }
    # Guard against a missing node_types section, consistent with the
    # artifact_types guard above (the original indexed it unchecked).
    if self.NODE_TYPES not in tosca:
        tosca[self.NODE_TYPES] = {}
    if self.T_VDU1 not in tosca[self.NODE_TYPES]:
        tosca[self.NODE_TYPES][self.T_VDU1] = {
            self.DERIVED_FROM: 'tosca.nodes.nfv.VDU',
            self.PROPERTIES: {
                self.COUNT: {
                    self.TYPE: self.INTEGER,
                    self.DEFAULT: 1
                },
                self.CLOUD_INIT: {
                    self.TYPE: self.STRING,
                    self.DEFAULT: '#cloud-config'
                },
            },
            self.CAPABILITIES: {
                self.VIRT_LINK: {
                    self.TYPE: 'tosca.capabilities.nfv.VirtualLinkable'
                },
            },
        }
    # Add CP type
    if self.T_CP1 not in tosca[self.NODE_TYPES]:
        tosca[self.NODE_TYPES][self.T_CP1] = {
            self.DERIVED_FROM: 'tosca.nodes.nfv.CP',
            self.PROPERTIES: {
                self.NAME: {
                    self.TYPE: self.STRING,
                    self.DESC: 'Name of the connection point'
                },
                self.CP_TYPE: {
                    self.TYPE: self.STRING,
                    self.DESC: 'Type of the connection point'
                },
                self.VDU_INTF_NAME: {
                    self.TYPE: self.STRING,
                    self.DESC: 'Name of the interface on VDU'
                },
                self.VDU_INTF_TYPE: {
                    self.TYPE: self.STRING,
                    self.DESC: 'Type of the interface on VDU'
                },
            },
        }
    return tosca
def __init__(self, log, yangs, tosca_template):
    """Hold the yang input and collect the generated TOSCA resources."""
    self.log = log
    self.yangs = yangs
    self.tosca_template = tosca_template
    # List of all TOSCA resources generated so far.
    self.tosca_resources = []
    self.metadata = {}
    log.debug(_('Mapping between YANG nodetemplate and TOSCA resource.'))
def getTraceString(traceList):
    """Render a traceback entry list as an indented, readable string."""
    pieces = []
    for entry in traceList:
        pieces.append(_('\t\tFile %(file)s, line %(line)s, in '
                        '%(method)s\n\t\t\t%(call)s\n')
                      % {'file': entry[0],
                         'line': entry[1],
                         'method': entry[2],
                         'call': entry[3]})
    return ''.join(pieces)
def output_to_yaml(self, tosca):
    """Convert the translated TOSCA dict to a YAML string.

    Emits keys in a fixed order (version, description, metadata, type
    sections, topology template) via an OrderedDict and a custom
    representer.
    """
    self.log.debug(_('Converting translated output to yaml format.'))
    dict_output = OrderedDict()
    dict_output.update({'tosca_definitions_version':
                        tosca['tosca_definitions_version']})
    # Description is stored as-is.
    # NOTE(review): the original computed a textwrap'd "desc_str" (and a
    # "topo_list") here but never used them; that dead code was removed.
    if ToscaResource.DESC in tosca:
        dict_output.update({ToscaResource.DESC: tosca[ToscaResource.DESC]})
    if ToscaResource.METADATA in tosca:
        dict_output.update({ToscaResource.METADATA:
                            tosca[ToscaResource.METADATA]})
    # Add all type sections, in a deterministic order.
    types_list = [ToscaResource.DATA_TYPES, ToscaResource.CAPABILITY_TYPES,
                  ToscaResource.NODE_TYPES,
                  ToscaResource.GROUP_TYPES, ToscaResource.POLICY_TYPES]
    for typ in types_list:
        if typ in tosca:
            dict_output.update({typ: self.ordered_nodes(tosca[typ])})
    # Add topology template
    if ToscaResource.TOPOLOGY_TMPL in tosca:
        tmpl = OrderedDict()
        for typ in tosca[ToscaResource.TOPOLOGY_TMPL]:
            tmpl.update({typ: self.ordered_nodes(
                tosca[ToscaResource.TOPOLOGY_TMPL][typ])})
        dict_output.update({ToscaResource.TOPOLOGY_TMPL: tmpl})
    yaml.add_representer(OrderedDict, self.represent_ordereddict)
    yaml_string = yaml.dump(dict_output, default_flow_style=False)
    # get rid of the '' from yaml.dump around numbers
    yaml_string = yaml_string.replace('\'', '')
    self.log.debug(_("YAML output:\n{0}").format(yaml_string))
    return yaml_string
def assertExceptionMessage(exception, message):
    """Assert the collected-exceptions report contains *message* for
    the given exception class."""
    expected = '{}: {}'.format(exception.__name__, message)
    report = ExceptionCollector.getExceptionsReport(False)
    assert expected in report, (
        _('Could not find "%(msg)s" in "%(rep)s".') % {
            'rep': report.__str__(),
            'msg': expected
        })
def process_const_vnfd(cvnfd):
    # Resolve a constituent-vnfd entry: find the already-translated
    # VNFD whose id matches and register it under the entry's
    # member-vnf-index.  Fields are popped as they are consumed so the
    # trailing warn can report anything left over.
    # Get the matching VNFD
    vnfd_id = cvnfd.pop(self.VNFD_ID_REF)
    for vnfd in vnfds:
        if vnfd.type == self.VNFD and vnfd.id == vnfd_id:
            self.vnfds[cvnfd.pop(self.MEM_VNF_INDEX)] = vnfd
            if self.START_BY_DFLT in cvnfd:
                vnfd.props[self.START_BY_DFLT] = \
                    cvnfd.pop(self.START_BY_DFLT)
            break
    if len(cvnfd):
        self.log.warn(
            _("{0}, Did not process the following for "
              "constituent vnfd {1}: {2}").format(
                  self, vnfd_id, cvnfd))
    self.log.debug(_("{0}, VNFD: {1}").format(self, self.vnfds))
def set_vld(self, name, vld_name):
    """Attach VLD *vld_name* to the connection point called *name*.

    Raises ValidationError when the connection point is unknown.
    """
    cp = self.get_cp(name)
    if not cp:
        err_msg = (_("{0}, Did not find connection point {1}").format(
            self, name))
        self.log.error(err_msg)
        raise ValidationError(message=err_msg)
    cp[self.VLD] = vld_name
def output_to_tosca(self):
    """Build one TOSCA template (plus supporting files) per NSD resource."""
    self.log.debug(_('Converting translated output to tosca template.'))
    templates = {}
    for resource in self.resources:
        # Each NSD should generate separate templates
        if resource.type != 'nsd':
            continue
        tmpl = resource.generate_tosca_type()
        tmpl = resource.generate_tosca_template(tmpl)
        self.log.debug(_("TOSCA template generated for {0}:\n{1}").
                       format(resource.name, tmpl))
        entry = {self.TOSCA: self.output_to_yaml(tmpl)}
        supporting = resource.get_supporting_files()
        if len(supporting):
            entry[self.FILES] = supporting
        templates[resource.name] = entry
    return templates
def set_vld(self, name, vld_name):
    """Set the VLD of connection point *name*; error if unknown."""
    cp = self.get_cp(name)
    if cp:
        cp[self.VLD] = vld_name
        return
    err_msg = (_("{0}, Did not find connection point {1}").
               format(self, name))
    self.log.error(err_msg)
    raise ValidationError(message=err_msg)
def _translate(self, output_dir=None, archive=False):
    """Run the yang→TOSCA translation and write or print the result."""
    self.log.debug(_('Loading the yang template for {0}.').
                   format(self.in_files))
    translator = YangTranslator(self.log, files=self.in_files)
    self.log.debug(_('Translating the yang template for {0}.').
                   format(self.in_files))
    output = translator.translate()
    if not output:
        self.log.error(_("Did not get any translated output!!"))
        return
    if output_dir:
        translator.write_output(output,
                                output_dir=output_dir,
                                archive=archive)
    else:
        for name in output.keys():
            print(_("TOSCA Template {0}:\n{1}").
                  format(name, output[name]))
def generate_inv_schema_property_error(self, attr, value, valid_values):
    """Append an InvalidSchemaError describing a bad attribute value."""
    msg = _('Schema definition of "%(propname)s" has '
            '"%(attr)s" attribute with invalid value '
            '"%(value1)s". The value must be one of '
            '"%(value2)s".') % {"propname": self.name,
                                "attr": attr,
                                "value1": value,
                                "value2": valid_values}
    ExceptionCollector.appendException(
        InvalidSchemaError(message=msg))
def __init__(self, **kwargs):
    """Format self.message from msg_fmt, tolerating missing format keys.

    When the format string references a key that was not supplied, the
    error is logged and, if fatal format errors are enabled, the
    original KeyError is re-raised with its traceback intact.
    """
    try:
        self.message = self.msg_fmt % kwargs
    except KeyError:
        exc_info = sys.exc_info()
        log.exception(_('Exception in string format operation: %s')
                      % exc_info[1])
        if YANGException._FATAL_EXCEPTION_FORMAT_ERRORS:
            # Re-raise the original exception *instance*; the original
            # code raised the class (exc_info[0]), which discarded the
            # message and traceback.
            raise exc_info[1].with_traceback(exc_info[2])
def process_mon_param(params):
    """Extract the known monitoring-param fields of each yang entry
    into self.mon_param, warning about any leftovers."""
    for param in params:
        monp = {}
        fields = [
            self.NAME, self.ID, 'value_type', 'units', 'group_tag',
            'json_query_method', 'http_endpoint_ref', 'widget_type',
            self.DESC
        ]
        for key in fields:
            if key in param:
                monp[key] = param.pop(key)
        if len(param):
            # Typo fix: "monitporing-param" -> "monitoring-param".
            self.log.warn(
                _("{0}, Did not process the following for "
                  "monitoring-param {1}").format(self, param))
        self.log.debug(
            _("{0}, Monitoring param: {1}").format(self, monp))
        self.mon_param.append(monp)
def _translate(self, output_dir=None, archive=False):
    """Translate the loaded yang files and emit or persist the result."""
    self.log.debug(
        _('Loading the yang template for {0}.').format(self.in_files))
    translator = YangTranslator(self.log, files=self.in_files)
    self.log.debug(
        _('Translating the yang template for {0}.').format(self.in_files))
    output = translator.translate()
    if not output:
        self.log.error(_("Did not get any translated output!!"))
    elif output_dir:
        translator.write_output(output,
                                output_dir=output_dir,
                                archive=archive)
    else:
        for tmpl_name in output.keys():
            print(
                _("TOSCA Template {0}:\n{1}").format(tmpl_name,
                                                     output[tmpl_name]))
def _load_config(cls, conf_file):
    '''Private method only to be called once from the __init__ module'''
    cls._translator_config = configparser.ConfigParser()
    try:
        cls._translator_config.read(conf_file)
    except configparser.ParsingError:
        # Fixed missing space between the two concatenated string
        # literals ("...file.Check..." -> "...file. Check...").
        msg = _('Unable to parse translator.conf file. '
                'Check to see that it exists in the conf directory.')
        raise exception.ConfFileParseError(message=msg)
def __init__(self, log, name, type_, yang):
    """Capture one yang node that is to be translated to TOSCA."""
    self.log = log
    self.name = name
    self.type_ = type_
    self.yang = yang
    self.id_ = None
    log.debug(_('Translating YANG node %(name)s of type %(type)s')
              % {'name': self.name,
                 'type': self.type_})
def generate_tosca_type(self, tosca):
    """Add the custom artifact, VDU and CP TOSCA types to *tosca*.

    Each type is only added when not already present; returns *tosca*.
    """
    # Typo fix in the debug message: "tosa" -> "tosca".
    self.log.debug(_("{0} Generate tosca types").
                   format(self, tosca))
    # Add custom artifact type
    if self.ARTIFACT_TYPES not in tosca:
        tosca[self.ARTIFACT_TYPES] = {}
    if self.T_ARTF_QCOW2 not in tosca[self.ARTIFACT_TYPES]:
        tosca[self.ARTIFACT_TYPES][self.T_ARTF_QCOW2] = {
            self.DERIVED_FROM: 'tosca.artifacts.Deployment.Image.VM.QCOW2',
            self.IMAGE_CHKSUM: {self.TYPE: self.STRING,
                                self.REQUIRED: self.NO},
        }
    # Guard against a missing node_types section, consistent with the
    # artifact_types guard above (the original indexed it unchecked).
    if self.NODE_TYPES not in tosca:
        tosca[self.NODE_TYPES] = {}
    if self.T_VDU1 not in tosca[self.NODE_TYPES]:
        tosca[self.NODE_TYPES][self.T_VDU1] = {
            self.DERIVED_FROM: 'tosca.nodes.nfv.VDU',
            self.PROPERTIES: {
                self.COUNT: {self.TYPE: self.INTEGER,
                             self.DEFAULT: 1},
                self.CLOUD_INIT: {self.TYPE: self.STRING,
                                  self.DEFAULT: '#cloud-config'},
            },
            self.CAPABILITIES: {
                self.VIRT_LINK: {
                    self.TYPE: 'tosca.capabilities.nfv.VirtualLinkable'
                },
            },
        }
    # Add CP type
    if self.T_CP1 not in tosca[self.NODE_TYPES]:
        tosca[self.NODE_TYPES][self.T_CP1] = {
            self.DERIVED_FROM: 'tosca.nodes.nfv.CP',
            self.PROPERTIES: {
                self.NAME: {self.TYPE: self.STRING,
                            self.DESC: 'Name of the connection point'},
                self.CP_TYPE: {self.TYPE: self.STRING,
                               self.DESC: 'Type of the connection point'},
                self.VDU_INTF_NAME: {self.TYPE: self.STRING,
                                     self.DESC:
                                     'Name of the interface on VDU'},
                self.VDU_INTF_TYPE: {self.TYPE: self.STRING,
                                     self.DESC:
                                     'Type of the interface on VDU'},
            },
        }
    return tosca
def process_vdu(self):
    """Parse this VDU's yang descriptor into self.props and self.ext_cp.

    Works on a deepcopy of self.yang, popping fields as they are
    consumed so leftovers can be reported.
    """
    self.log.debug(
        _("Process VDU desc {0}: {1}").format(self.name, self.yang))
    vdu_dic = deepcopy(self.yang)
    vdu = {}
    # Plain pass-through fields.
    fields = [
        self.ID, self.COUNT, self.CLOUD_INIT, self.IMAGE,
        self.IMAGE_CHKSUM
    ]
    for key in fields:
        if key in vdu_dic:
            vdu[key] = vdu_dic.pop(key)
    self.id = vdu[self.ID]
    # Map vm-flavor entries to host capability properties, appending
    # the unit suffix expected by TOSCA.
    if self.VM_FLAVOR in vdu_dic:
        vdu[self.HOST] = {}
        for key, value in vdu_dic.pop(self.VM_FLAVOR).items():
            vdu[self.HOST][self.VM_FLAVOR_MAP[key]] = "{}{}". \
                format(value, self.VM_SIZE_UNITS_MAP[key])
    # External interfaces become connection-point entries.
    if self.EXT_INTF in vdu_dic:
        for ext_intf in vdu_dic.pop(self.EXT_INTF):
            cp = {}
            cp[self.NAME] = ext_intf.pop(self.VNFD_CP_REF)
            cp[self.VDU_INTF_NAME] = ext_intf.pop(self.NAME)
            cp[self.VDU_INTF_TYPE] = ext_intf[self.VIRT_INTF][self.TYPE_Y]
            self.log.debug(
                _("{0}, External interface {1}: {2}").format(
                    self, cp, ext_intf))
            self.ext_cp.append(cp)
    self.remove_ignored_fields(vdu_dic)
    if len(vdu_dic):
        self.log.warn(
            _("{0}, Did not process the following in "
              "VDU: {1}").format(self, vdu_dic))
    self.log.debug(_("{0} VDU: {1}").format(self, vdu))
    self.props = vdu
def _translate_yang(self):
    """Translate the loaded NSD and VNFD yang descriptors into TOSCA
    resources.

    VNFD resources are processed before NSD resources, since NSD
    handling references the translated VNFDs.
    """
    self.log.debug(_('Translating the descriptors.'))
    # Guard against missing descriptor types (the original indexed
    # self.yangs unchecked and raised KeyError when, e.g., only VNFDs
    # were loaded; the sibling variant of this method guards the same
    # way).
    if self.NSD in self.yangs:
        for nsd in self.yangs[self.NSD]:
            self.log.debug(
                _("Translate descriptor of type nsd: {}").format(nsd))
            tosca_node = TranslateDescriptors. \
                YANG_TO_TOSCA_TYPE[self.NSD](
                    self.log,
                    nsd.pop(ToscaResource.NAME),
                    self.NSD,
                    nsd)
            self.tosca_resources.append(tosca_node)
    if self.VNFD in self.yangs:
        for vnfd in self.yangs[self.VNFD]:
            self.log.debug(
                _("Translate descriptor of type vnfd: {}").format(vnfd))
            tosca_node = TranslateDescriptors. \
                YANG_TO_TOSCA_TYPE[self.VNFD](
                    self.log,
                    vnfd.pop(ToscaResource.NAME),
                    self.VNFD,
                    vnfd)
            self.tosca_resources.append(tosca_node)
    # First translate VNFDs
    for node in self.tosca_resources:
        if node.type == self.VNFD:
            self.log.debug(
                _("Handle yang for {0} of type {1}").format(
                    node.name, node.type_))
            node.handle_yang()
    # Now translate NSDs
    for node in self.tosca_resources:
        if node.type == self.NSD:
            self.log.debug(
                _("Handle yang for {0} of type {1}").format(
                    node.name, node.type_))
            node.handle_yang(self.tosca_resources)
    return self.tosca_resources
def output_to_tosca(self):
    """Build TOSCA templates for every resource.

    VNFD templates are generated first (NSD generation may depend on
    them), then NSD templates are emitted, and finally the VNFD
    templates themselves are serialized to YAML and added.
    """
    self.log.debug(_('Converting translated output to tosca template.'))
    templates = {}
    vnfd_templates = {}
    # Pass 1: generate (but don't yet serialize) all VNFD templates.
    for resource in self.resources:
        if resource.type == 'vnfd':
            tmpl = resource.generate_tosca()
            tmpl = resource.generate_tosca_template(tmpl)
            self.log.debug(
                _("TOSCA template generated for {0}:\n{1}").format(
                    resource.name, tmpl))
            vnfd_templates[resource.name] = tmpl
    # Pass 2: each NSD generates its own separate template.
    for resource in self.resources:
        # Each NSD should generate separate templates
        if resource.type == 'nsd':
            tmpl = resource.generate_tosca_type()
            tmpl = resource.generate_tosca_template(tmpl)
            self.log.debug(
                _("TOSCA template generated for {0}:\n{1}").format(
                    resource.name, tmpl))
            templates[resource.name] = {
                self.TOSCA: self.output_to_yaml(tmpl)
            }
            files = resource.get_supporting_files()
            if len(files):
                templates[resource.name][self.FILES] = files
    # Pass 3: serialize the VNFD templates generated in pass 1.
    for resource in self.resources:
        if resource.type == 'vnfd':
            tmpl = vnfd_templates[resource.name]
            templates[resource.name] = {
                self.TOSCA: self.output_to_yaml(tmpl)
            }
            files = resource.get_supporting_files()
            if len(files):
                templates[resource.name][self.FILES] = files
    return templates
def compare_dicts(cls, generated, expected, log=None):
    """Compare two dictionaries and raise ValueError on real changes.

    Uses DeepDiff; only differences whose category appears in
    cls.ERROR_ITEMS are treated as errors.
    """
    if log:
        log.debug(_("Generated: {0}").format(generated))
        log.debug(_("Expected: {0}").format(expected))
    diff = DeepDiff(expected, generated)
    if log:
        log.debug(_("Keys in diff: {0}").format(diff.keys()))
        # Single formatted dump of the differences.  The original
        # logged a bare "Differences:" heading and then the same
        # heading again with the formatted diff; the duplicate log
        # call was removed.
        log.info("Differences:\n{0}".format(
            pprint.pformat(diff, indent=cls.INDENT)))
    if len(set(cls.ERROR_ITEMS).intersection(diff.keys())):
        diff_str = pprint.pformat(diff)
        msg = _("Found item changes: {0}").format(diff_str)
        if log:
            log.error(msg)
        raise ValueError(msg)
def __init__(self, log, yangs=None, files=None, packages=None):
    """Initialize translator state.

    :param yangs: optional pre-parsed yang descriptor dict.
    :param files: optional list of input files to parse.
    :param packages: optional list of packages; defaults to a fresh
        list per instance.  The original used a mutable default
        argument (``packages=[]``), so every instance created without
        an explicit ``packages`` shared — and mutated — the same list.
    """
    super(YangTranslator, self).__init__()
    self.log = log
    self.yangs = {}
    if yangs is not None:
        self.yangs = yangs
    self.files = files
    self.archive = None
    self.tosca_template = ToscaTemplate(log)
    self.node_translator = None
    self.pkgs = [] if packages is None else packages
    log.info(_('Initialized parameters for translation.'))
def process_mon_param(params):
    """Translate yang monitoring-param entries into self.mon_param.

    Splits each entry into the TOSCA monitoring-param fields plus a
    nested 'ui_data' dict for the UI-only attributes.
    """
    for param in params:
        mon_param = {}
        ui_param = {}
        if 'name' in param:
            mon_param['name'] = param['name']
        if 'description' in param:
            mon_param['description'] = param['description']
        if 'polling_interval' in param:
            mon_param['polling_interval'] = param['polling_interval']
        if 'http_endpoint_ref' in param:
            mon_param['url_path'] = param['http_endpoint_ref']
        if 'json_query_method' in param:
            mon_param['json_query_method'] = param[
                'json_query_method'].lower()
        # NOTE(review): the value_type -> constraints conversion was
        # commented out upstream; intentionally left out here.
        if 'group_tag' in param:
            ui_param['group_tag'] = param['group_tag']
        if 'widget_type' in param:
            ui_param['widget_type'] = param['widget_type'].lower()
        if 'units' in param:
            ui_param['units'] = param['units']
        mon_param['ui_data'] = ui_param
        self.mon_param.append(mon_param)
        if len(param):
            # Fields are read (not popped) above, so this reports the
            # full entry.  Typo fixed: "monitporing" -> "monitoring".
            self.log.warn(
                _("{0}, Did not process the following for "
                  "monitoring-param {1}").format(self, param))
        # Fixed: the original logged a never-populated dict ("monp",
        # always {}) here instead of the translated mon_param; the dead
        # "monp" and "fields" locals were removed.
        self.log.debug(
            _("{0}, Monitoring param: {1}").format(self, mon_param))
def _translate_yang(self):
    """Translate all loaded NSD and VNFD yang descriptors to TOSCA.

    VNFD resources are handled before NSD resources, since NSD handling
    references the translated VNFDs.
    """
    self.log.debug(_('Translating the descriptors.'))
    for desc in self.yangs[self.NSD]:
        self.log.debug(_("Translate descriptor of type nsd: {}").
                       format(desc))
        translated = TranslateDescriptors. \
            YANG_TO_TOSCA_TYPE[self.NSD](self.log,
                                         desc.pop(ToscaResource.NAME),
                                         self.NSD,
                                         desc)
        self.tosca_resources.append(translated)
    for desc in self.yangs[self.VNFD]:
        self.log.debug(_("Translate descriptor of type vnfd: {}").
                       format(desc))
        translated = TranslateDescriptors. \
            YANG_TO_TOSCA_TYPE[self.VNFD](self.log,
                                          desc.pop(ToscaResource.NAME),
                                          self.VNFD,
                                          desc)
        self.tosca_resources.append(translated)
    # VNFDs first, then NSDs (which reference the VNFD resources).
    for node in self.tosca_resources:
        if node.type == self.VNFD:
            self.log.debug(_("Handle yang for {0} of type {1}").
                           format(node.name, node.type_))
            node.handle_yang()
    for node in self.tosca_resources:
        if node.type == self.NSD:
            self.log.debug(_("Handle yang for {0} of type {1}").
                           format(node.name, node.type_))
            node.handle_yang(self.tosca_resources)
    return self.tosca_resources
def process_scale_grp(dic):
    """Translate a scaling-group descriptor into self.scale_grps."""
    sg = {}
    self.log.debug(_("{0}, scale group: {1}").format(self, dic))
    fields = [self.NAME, self.MIN_INST_COUNT, self.MAX_INST_COUNT]
    for key in fields:
        if key in dic:
            sg[key] = dic.pop(key)
    # Map member-vnf-index references to the VNFD names with counts.
    membs = {}
    for vnfd_memb in dic.pop(self.VNFD_MEMBERS):
        vnfd_idx = vnfd_memb[self.MEM_VNF_INDEX_REF]
        if vnfd_idx in self.vnfds:
            membs[self.vnfds[vnfd_idx].name] = \
                vnfd_memb[self.COUNT]
    sg['vnfd_members'] = membs
    trigs = {}
    if self.SCALE_ACT in dic:
        for sg_act in dic.pop(self.SCALE_ACT):
            # Validate the primitive
            prim = sg_act.pop(self.NS_CONF_PRIM_REF)
            for cprim in self.conf_prims:
                if cprim[self.NAME] == prim:
                    trigs[sg_act.pop(self.TRIGGER)] = prim
                    break
            # NOTE(review): detection relies on the trigger only being
            # popped when a matching primitive is found — if sg_act is
            # non-empty here, the lookup presumably failed; verify this
            # also holds for entries with extra fields.
            if len(sg_act):
                err_msg = (
                    _("{0}, Did not find config-primitive {1}").format(
                        self, prim))
                self.log.error(err_msg)
                raise ValidationError(message=err_msg)
        sg[self.CONFIG_ACTIONS] = trigs
    if len(dic):
        self.log.warn(
            _("{0}, Did not process all fields for {1}").format(
                self, dic))
    self.log.debug(_("{0}, Scale group {1}").format(self, sg))
    self.scale_grps.append(sg)
def main(self, log, args):
    """Load the generated and expected files and compare them.

    Exits with status 1 for an unsupported file type; otherwise parses
    both files as yaml or json and delegates to compare_dicts.
    """
    self.log = log
    self.log.debug(_("Args: {0}").format(args))
    if args.type not in self.SUPPORTED_TYPES:
        self.log.error(_("Unsupported file type {0}").format(args.type))
        exit(1)
    with open(args.generated) as g:
        gen_data = g.read()
        if args.type == 'yaml':
            # NOTE(review): yaml.load without an explicit Loader is
            # unsafe on untrusted input; confirm these files are
            # trusted or switch to yaml.safe_load.
            y_gen = yaml.load(gen_data)
        else:
            y_gen = json.loads(gen_data)
    with open(args.expected) as e:
        exp_data = e.read()
        if args.type == 'yaml':
            y_exp = yaml.load(exp_data)
        else:
            y_exp = json.loads(exp_data)
    self.compare_dicts(y_gen, y_exp, log=self.log)
def process_vdu(self):
    """Parse this VDU's yang descriptor into self.props and self.ext_cp.

    Works on a deepcopy of self.yang, popping fields as they are
    consumed so leftovers can be reported.
    """
    self.log.debug(_("Process VDU desc {0}: {1}").format(self.name,
                                                         self.yang))
    vdu_dic = deepcopy(self.yang)
    vdu = {}
    # Plain pass-through fields.
    fields = [self.ID, self.COUNT, self.CLOUD_INIT, self.IMAGE,
              self.IMAGE_CHKSUM]
    for key in fields:
        if key in vdu_dic:
            vdu[key] = vdu_dic.pop(key)
    self.id = vdu[self.ID]
    # Map vm-flavor entries to host capability properties, appending
    # the unit suffix expected by TOSCA.
    if self.VM_FLAVOR in vdu_dic:
        vdu[self.HOST] = {}
        for key, value in vdu_dic.pop(self.VM_FLAVOR).items():
            vdu[self.HOST][self.VM_FLAVOR_MAP[key]] = "{}{}". \
                format(value, self.VM_SIZE_UNITS_MAP[key])
    # External interfaces become connection-point entries.
    if self.EXT_INTF in vdu_dic:
        for ext_intf in vdu_dic.pop(self.EXT_INTF):
            cp = {}
            cp[self.NAME] = ext_intf.pop(self.VNFD_CP_REF)
            cp[self.VDU_INTF_NAME] = ext_intf.pop(self.NAME)
            cp[self.VDU_INTF_TYPE] = ext_intf[self.VIRT_INTF][self.TYPE_Y]
            self.log.debug(_("{0}, External interface {1}: {2}").
                           format(self, cp, ext_intf))
            self.ext_cp.append(cp)
    self.remove_ignored_fields(vdu_dic)
    if len(vdu_dic):
        self.log.warn(_("{0}, Did not process the following in "
                        "VDU: {1}").
                      format(self, vdu_dic))
    self.log.debug(_("{0} VDU: {1}").format(self, vdu))
    self.props = vdu
def _get_file_type(self, path):
    """Classify *path* as YAML (plain text) or TAR (gzip) via libmagic.

    Raises ValueError for any other MIME type.
    """
    m = magic.open(magic.MAGIC_MIME)
    m.load()
    try:
        typ = m.file(path)
    finally:
        # Release the libmagic cookie; the original never closed it.
        m.close()
    if typ.startswith('text/plain'):
        # Assume to be yaml
        return self.YAML
    elif typ.startswith('application/x-gzip'):
        return self.TAR
    else:
        msg = _("The file {0} is not a supported type: {1}"). \
            format(path, typ)
        self.log.error(msg)
        raise ValueError(msg)
def main(self, log, args):
    """Compare a generated yaml/json file against an expected one."""
    self.log = log
    self.log.debug(_("Args: {0}").format(args))
    if args.type not in self.SUPPORTED_TYPES:
        self.log.error(_("Unsupported file type {0}").
                       format(args.type))
        exit(1)

    def _load(path):
        # NOTE(review): yaml.load without an explicit Loader is unsafe
        # on untrusted input; confirm these files are trusted.
        with open(path) as fh:
            text = fh.read()
        if args.type == 'yaml':
            return yaml.load(text)
        return json.loads(text)

    y_gen = _load(args.generated)
    y_exp = _load(args.expected)
    self.compare_dicts(y_gen, y_exp, log=self.log)
def main(self, raw_args=None, log=None):
    """CLI entry point: parse args, validate input files, translate.

    Raises ValueError for a nonexistent input path or mixed file types.
    """
    args = self._parse_args(raw_args)
    if log is None:
        if args.debug:
            logging.basicConfig(level=logging.DEBUG)
        else:
            logging.basicConfig(level=logging.ERROR)
        log = logging.getLogger("yang-translator")
    log.debug(_("Args passed is {}").format(args))
    self.log = log
    self.in_files = []
    self.ftype = None
    for f in args.template_file:
        path = os.path.abspath(f[0])
        if not os.path.isfile(path):
            msg = _("The path %(path)s is not a valid file.") % {
                'path': path}
            log.error(msg)
            raise ValueError(msg)
        # Get the file type; all inputs must agree on it.
        ftype = self._get_file_type(path)
        if self.ftype is None:
            self.ftype = ftype
        elif self.ftype != ftype:
            # Typo fix: "hould" -> "should".
            msg = _("All input files should be of same type")
            log.error(msg)
            raise ValueError(msg)
        self.in_files.append(path)
    self.log.debug(_("Input files are of type {0}").
                   format(self.ftype))
    self.archive = None
    self._translate(output_dir=args.output_dir, archive=args.archive)
def write_output(self, output, output_dir=None, archive=False,):
    """Write each template as CSAR files (when output_dir is given)
    or print it; returns the list of created archives."""
    if not output:
        return
    zip_files = []
    for name in output.keys():
        if output_dir:
            zip_files.append(
                self._create_csar_files(output_dir,
                                        name,
                                        output[name],
                                        archive=archive,))
        else:
            print(_("TOSCA Template {0}:\n{1}").
                  format(name, output[name]))
    return zip_files
def get_supporting_files(self):
    """Return the image files supporting this descriptor.

    Each entry names the image and its destination path inside the
    package.
    """
    files = []
    for image in self.images:
        image_name = os.path.basename(image)
        files.append({
            self.TYPE: 'image',
            self.NAME: image_name,
            self.DEST: "{}/{}".format(self.IMAGE_DIR, image_name),
        })
    self.log.debug(_("Supporting files for {} : {}").format(self, files))
    # NOTE(review): the original called shutil.rmtree(out_dir) when no
    # files were found, but out_dir is undefined in this scope and the
    # call raised NameError; any such cleanup belongs to the caller
    # that owns the output directory.
    return files
def ordered_node(self, node):
    """Return *node* as an OrderedDict with the well-known keys first.

    Keys outside the preferred order are appended at the end (with a
    warning).  Note: pops the recognised keys from *node*.
    """
    preferred = (ToscaResource.TYPE,
                 ToscaResource.DERIVED_FROM,
                 ToscaResource.DESC,
                 ToscaResource.MEMBERS,
                 ToscaResource.PROPERTIES,
                 ToscaResource.CAPABILITIES,
                 ToscaResource.REQUIREMENTS,
                 ToscaResource.ARTIFACTS,
                 ToscaResource.INTERFACES)
    new_node = OrderedDict()
    for key in preferred:
        if key in node:
            new_node.update({key: node.pop(key)})
    # Check if we missed any entry
    if len(node):
        self.log.warn(_("Did not sort these entries: {0}").
                      format(node))
    new_node.update(node)
    return new_node
def translate_metadata(self):
    """Translate and store the template metadata in the instance.

    Reads name/vendor/version from the first NSD (preferred) or VNFD
    descriptor, falling back to the defaults below.
    """
    # Metadata key -> yang descriptor field name.
    FIELDS_MAP = {
        'ID': 'name',
        'vendor': 'vendor',
        'version': 'version',
    }
    # Initialize to default values
    metadata = {
        'name': 'yang_to_tosca',
        'vendor': 'RIFT.io',
        'version': '1.0',
    }
    # Fixed: the original read self.yang (an undefined attribute)
    # instead of self.yangs, and left yang_meta unbound when neither
    # descriptor type was loaded.
    yang_meta = {}
    if 'nsd' in self.yangs:
        yang_meta = self.yangs['nsd'][0]
    elif 'vnfd' in self.yangs:
        yang_meta = self.yangs['vnfd'][0]
    for key, yang_key in FIELDS_MAP.items():
        # Fixed: the original tested `key in yang_meta` but indexed the
        # descriptor with the mapped yang field name, so values were
        # never copied; the membership test now uses the yang field.
        if yang_key in yang_meta:
            metadata[key] = str(yang_meta[yang_key])
    self.log.debug(_("Metadata {0}").format(metadata))
    self.metadata = metadata
def get_yangs(self):
    '''Get the descriptors and convert to yang instances'''
    for filename in self.files:
        self.log.debug(_("Load file {0}").format(filename))
        # Each file is expected to carry exactly one descriptor;
        # non-tar files are skipped.
        if not tarfile.is_tarfile(filename):
            continue
        tar = open(filename, "r+b")
        archive = TarPackageArchive(self.log, tar)
        pkg = archive.create_package()
        self.pkgs.append(pkg)
        desc_type = pkg.descriptor_type
        # Reject anything that is neither an NSD nor a VNFD package.
        if desc_type not in (TranslateDescriptors.NSD,
                             TranslateDescriptors.VNFD):
            raise ValidationError("Unknown descriptor type: {}".
                                  format(desc_type))
        # Group descriptors by type, creating the bucket on first use.
        self.yangs.setdefault(desc_type, []).append(
            pkg.descriptor_msg.as_dict())
def _load_classes(log, locations, classes): '''Dynamically load all the classes from the given locations.''' for cls_path in locations: # Use the absolute path of the class path abs_path = os.path.dirname(os.path.abspath(__file__)) abs_path = abs_path.replace('rift/mano/yang_translator/rwmano', cls_path) log.debug(_("Loading classes from %s") % abs_path) # Grab all the yang type module files in the given path mod_files = [f for f in os.listdir(abs_path) if ( f.endswith('.py') and not f.startswith('__init__') and f.startswith('yang_'))] # For each module, pick out the target translation class for f in mod_files: f_name, ext = f.rsplit('.', 1) mod_name = cls_path + '/' + f_name mod_name = mod_name.replace('/', '.') try: mod = importlib.import_module(mod_name) target_name = getattr(mod, 'TARGET_CLASS_NAME') clazz = getattr(mod, target_name) classes.append(clazz) except ImportError: raise YangModImportError(mod_name=mod_name) except AttributeError: if target_name: raise YangClassImportError(name=target_name, mod_name=mod_name) else: # TARGET_CLASS_NAME is not defined in module. # Re-raise the exception raise
def generate_vdu_template(self, tosca, vnf_name):
    """Add this VDU's node template (and its connection points) to *tosca*.

    Builds a node of type T_VDU1 from self.props — consuming (popping)
    the HOST, IMAGE, IMAGE_CHKSUM and ID entries as it goes — records any
    referenced image in self.images, and inserts the node plus one
    connection-point template per entry of self.ext_cp into
    tosca[TOPOLOGY_TMPL][NODE_TMPL].

    NOTE: this method mutates self.props, self.images, the cp dicts in
    self.ext_cp, and *tosca* itself.

    :param tosca: the TOSCA template dict being assembled; must already
                  contain tosca[TOPOLOGY_TMPL][NODE_TMPL].
    :param vnf_name: enclosing VNF name, used to derive node/image names.
    :returns: the same *tosca* dict, updated in place.
    """
    # NOTE(review): the format string skips placeholder {1} (tosca), so
    # only self and vnf_name appear in the message — presumably
    # intentional to keep the log line short.
    self.log.debug(_("{0} Generate tosca template for {2}").
                   format(self, tosca, vnf_name))
    name = self.get_name(vnf_name)
    node = {}
    node[self.TYPE] = self.T_VDU1
    if self.HOST in self.props:
        # Host requirements become a capability block; the entry is
        # popped so it is not duplicated under PROPERTIES below.
        node[self.CAPABILITIES] = {
            self.HOST: {self.PROPERTIES: self.props.pop(self.HOST)}
        }
    else:
        self.log.warn(_("{0}, Does not have host requirements defined").
                      format(self))
    if self.IMAGE in self.props:
        # Register the VM image as an artifact referenced relative to the
        # package image directory, and remember it for later extraction.
        img_name = "{}_{}_vm_image".format(vnf_name, self.name)
        image = "../{}/{}".format(self.IMAGE_DIR,
                                  self.props.pop(self.IMAGE))
        self.images.append(image)
        node[self.ARTIFACTS] = {img_name: {
            self.FILE: image,
            self.TYPE: self.T_ARTF_QCOW2,
        }}
        if self.IMAGE_CHKSUM in self.props:
            node[self.ARTIFACTS][img_name][self.IMAGE_CHKSUM] = \
                self.props.pop(self.IMAGE_CHKSUM)
        # The image artifact doubles as the Standard 'create' operation.
        node[self.INTERFACES] = {'Standard': {
            'create': img_name
        }}
    # Remove the internal id; the remaining props become the node's
    # PROPERTIES verbatim.
    self.props.pop(self.ID)
    node[self.PROPERTIES] = self.props
    self.log.debug(_("{0}, VDU node: {1}").format(self, node))
    tosca[self.TOPOLOGY_TMPL][self.NODE_TMPL][name] = node
    # Generate the connection point templates
    for cp in self.ext_cp:
        cpt = {self.TYPE: self.T_CP1}
        # Every CP binds back to this VDU node...
        cpt[self.REQUIREMENTS] = []
        cpt[self.REQUIREMENTS].append({self.VIRT_BIND: {
            self.NODE: self.get_name(vnf_name)
        }})
        # ...and optionally links to a VLD (popped so it does not leak
        # into the CP's PROPERTIES).
        if self.VLD in cp:
            vld = cp.pop(self.VLD)
            cpt[self.REQUIREMENTS].append({self.VIRT_LINK: {
                self.NODE: vld
            }})
        cpt[self.PROPERTIES] = cp
        # '/' is not valid in a node template name; flatten it.
        cp_name = cp[self.NAME].replace('/', '_')
        self.log.debug(_("{0}, CP node {1}: {2}").
                       format(self, cp_name, cpt))
        tosca[self.TOPOLOGY_TMPL][self.NODE_TMPL][cp_name] = cpt
    return tosca
def assertExceptionMessage(exception, message):
    """Assert that the collected exception report contains *message*
    raised as *exception* (formatted as 'ExcName: message')."""
    expected = exception.__name__ + ': ' + message
    report = ExceptionCollector.getExceptionsReport(False)
    failure_detail = (_('Could not find "%(msg)s" in "%(rep)s".') %
                      {'rep': report.__str__(), 'msg': expected})
    assert expected in report, failure_detail
def _create_csar_files(self, output_dir, name, tmpl, archive=False): if ToscaTemplate.TOSCA not in tmpl: self.log.error(_("Did not find TOSCA template for {0}"). format(name)) return # Create sub for each NS template subdir = os.path.join(output_dir, name) if os.path.exists(subdir): shutil.rmtree(subdir) os.makedirs(subdir) # Create the definitions dir def_dir = os.path.join(subdir, 'Definitions') os.makedirs(def_dir) entry_file = os.path.join(def_dir, name+'.yaml') self.log.debug(_("Writing file {0}"). format(entry_file)) with open(entry_file, 'w+') as f: f.write(tmpl[ToscaTemplate.TOSCA]) # Create the Tosca meta meta_dir = os.path.join(subdir, 'TOSCA-Metadata') os.makedirs(meta_dir) meta = '''TOSCA-Meta-File-Version: 1.0 CSAR-Version: 1.1 Created-By: RIFT.io Entry-Definitions: Definitions/''' meta_data = "{}{}".format(meta, name+'.yaml') meta_file = os.path.join(meta_dir, 'TOSCA.meta') self.log.debug(_("Writing file {0}:\n{1}"). format(meta_file, meta_data)) with open(meta_file, 'w+') as f: f.write(meta_data) # Copy other supporting files if ToscaTemplate.FILES in tmpl: for f in tmpl[ToscaTemplate.FILES]: self.log.debug(_("Copy supporting file {0}").format(f)) # Search in source packages if len(self.pkgs): for pkg in self.pkgs: # TODO(pjoseph): Need to add support for other file types fname = f[ToscaResource.NAME] dest_path = os.path.join(subdir, f[ToscaResource.DEST]) ftype = f[ToscaResource.TYPE] if ftype == 'image': image_file_map = rift.package.image.get_package_image_files(pkg) if fname in image_file_map: self.log.debug(_("Extracting image {0} to {1}"). format(fname, dest_path)) pkg.extract_file(image_file_map[fname], dest_path) break elif ftype == 'script': script_file_map = \ rift.package.script.PackageScriptExtractor.package_script_files(pkg) if fname in script_file_map: self.log.debug(_("Extracting script {0} to {1}"). format(fname, dest_path)) pkg.extract_file(script_file_map[fname], dest_path) break else: self.log.warn(_("Unknown file type {0}: {1}"). 
format(ftype, f)) #TODO(pjoseph): Search in other locations # Create the ZIP archive if archive: prev_dir=os.getcwd() os.chdir(subdir) try: zip_file = name + '.zip' zip_path = os.path.join(output_dir, zip_file) self.log.debug(_("Creating zip file {0}").format(zip_path)) zip_cmd = "zip -r {}.partial ." subprocess.check_call(zip_cmd.format(zip_path), shell=True, stdout=subprocess.DEVNULL) mv_cmd = "mv {0}.partial {0}" subprocess.check_call(mv_cmd.format(zip_path), shell=True, stdout=subprocess.DEVNULL) shutil.rmtree(subdir) return zip_path except subprocess.CalledProcessError as e: self.log.error(_("Creating CSAR archive failed: {0}"). format(e)) except Exception as e: self.log.exception(e) finally: os.chdir(prev_dir)