def _find_node_template_containing_attribute(self):
    """Return the node template that holds the referenced attribute.

    Resolves the first get_attribute argument: the HOST keyword walks
    the HOSTED_ON relationship chain via _find_host_containing_attribute;
    any other name is looked up directly and its type definition is
    checked for the attribute.  Errors are collected via
    ExceptionCollector rather than raised.
    """
    if self.node_template_name == HOST:
        # Currently this is the only way to tell whether the function
        # is used within the outputs section of the TOSCA template.
        if isinstance(self.context, list):
            ExceptionCollector.appendException(
                ValueError(_(
                    '"get_attribute: [ HOST, ... ]" is not allowed in '
                    '"outputs" section of the TOSCA template.')))
            return
        node_tpl = self._find_host_containing_attribute()
        if not node_tpl:
            # No node in the HOSTED_ON chain exposes the attribute.
            ExceptionCollector.appendException(
                ValueError(_(
                    '"get_attribute: [ HOST, ... ]" was used in node '
                    'template "{0}" but "{1}" was not found in '
                    'the relationship chain.').format(self.context.name,
                                                      HOSTED_ON)))
    else:
        node_tpl = self._find_node_template(self.args[0])
        # Only check the attribute when the template itself resolved.
        if node_tpl and \
                not self._attribute_exists_in_type(node_tpl.type_definition):
            ExceptionCollector.appendException(
                KeyError(_('Attribute "%(att)s" was not found in node '
                           'template "%(ntpl)s".') %
                         {'att': self.attribute_name,
                          'ntpl': node_tpl.name}))
    return node_tpl
def convert_unit_size_to_num(size, unit=None):
    """Convert given size to a number representing given unit.

    If unit is None, convert to a number representing UNIT_SIZE_DEFAULT
    :param size: unit size e.g. 1 TB
    :param unit: unit to be converted to e.g GB
    :return: converted number e.g. 1000 for 1 TB size and unit GB
    """
    if unit:
        unit = MemoryUnit.validate_unit(unit)
    else:
        unit = MemoryUnit.UNIT_SIZE_DEFAULT
        # Log the unit actually used: the original hard-coded "B",
        # which could disagree with UNIT_SIZE_DEFAULT.
        log.info(
            _("A memory unit is not provided for size; using the "
              "default unit %(default)s") % {"default": unit}
        )
    # Raw string literal: "\d"/"\s"/"\w" in a plain string are invalid
    # escape sequences (DeprecationWarning; an error in future Python).
    regex = re.compile(r"(\d*)\s*(\w*)")
    result = regex.match(str(size)).groups()
    if result[1]:
        # A unit suffix was supplied with the size: rescale the numeric
        # part from that unit into the requested target unit.
        unit_size = MemoryUnit.validate_unit(result[1])
        converted = int(
            str_to_num(result[0])
            * MemoryUnit.UNIT_SIZE_DICT[unit_size]
            * math.pow(MemoryUnit.UNIT_SIZE_DICT[unit], -1)
        )
        log.info(
            _("Given size %(size)s is converted to %(num)s "
              "%(unit)s") % {"size": size, "num": converted, "unit": unit}
        )
    else:
        # Bare number: return it unconverted.
        converted = str_to_num(result[0])
    return converted
def _validate_and_load_imports(self):
    """Validate the "imports" section and load each referenced template.

    Supports both the dict style ({name: uri-or-definition}) and the
    legacy list style (bare URIs).  Duplicate import names are reported;
    all errors are collected via ExceptionCollector instead of raised.
    """
    imports_names = set()
    if not self.importslist:
        msg = _('"imports" keyname is defined without including '
                'templates.')
        log.error(msg)
        ExceptionCollector.appendException(ValidationError(message=msg))
        return
    for import_def in self.importslist:
        if isinstance(import_def, dict):
            for import_name, import_uri in import_def.items():
                if import_name in imports_names:
                    msg = (_('Duplicate import name "%s" was found.') %
                           import_name)
                    log.error(msg)
                    ExceptionCollector.appendException(
                        ValidationError(message=msg))
                imports_names.add(import_name)
                custom_type = self._load_import_template(import_name,
                                                         import_uri)
                namespace_prefix = None
                # A dict-valued URI may carry an optional namespace
                # prefix alongside the file reference.
                if isinstance(import_uri, dict):
                    namespace_prefix = import_uri.get(
                        self.NAMESPACE_PREFIX)
                self._update_custom_def(custom_type, namespace_prefix)
        else:  # old style of imports
            custom_type = self._load_import_template(None,
                                                     import_def)
            if custom_type:
                self._update_custom_def(custom_type, None)
def convert_unit_size_to_num(size, unit=None):
    """Convert given size to a number representing given unit.

    If unit is None, convert to a number representing UNIT_SIZE_DEFAULT
    :param size: unit size e.g. 1 TB
    :param unit: unit to be converted to e.g GB
    :return: converted number e.g. 1000 for 1 TB size and unit GB
    """
    if unit:
        unit = MemoryUnit.validate_unit(unit)
    else:
        unit = MemoryUnit.UNIT_SIZE_DEFAULT
        # Log the unit actually used: the original hard-coded 'B',
        # which could disagree with UNIT_SIZE_DEFAULT.
        log.info(_('A memory unit is not provided for size; using the '
                   'default unit %(default)s.') % {'default': unit})
    # Raw string literal: '\d'/'\s'/'\w' in a plain string are invalid
    # escape sequences (DeprecationWarning; an error in future Python).
    regex = re.compile(r'(\d*)\s*(\w*)')
    result = regex.match(str(size)).groups()
    if result[1]:
        # A unit suffix was supplied with the size: rescale the numeric
        # part from that unit into the requested target unit.
        unit_size = MemoryUnit.validate_unit(result[1])
        converted = int(str_to_num(result[0])
                        * MemoryUnit.UNIT_SIZE_DICT[unit_size]
                        * math.pow(MemoryUnit.UNIT_SIZE_DICT
                                   [unit], -1))
        log.info(_('Given size %(size)s is converted to %(num)s '
                   '%(unit)s.') % {'size': size,
                                   'num': converted, 'unit': unit})
    else:
        # Bare number: return it unconverted.
        converted = str_to_num(result[0])
    return converted
def handle_properties(self):
    """Convert TOSCA node properties into Heat (HOT) properties.

    get_input values become {'get_param': ...}.  The 'size' property is
    converted to whole GiB and rounded up, since Cinder rejects
    fractional or zero volume sizes.
    """
    tosca_props = {}
    for prop in self.nodetemplate.get_properties_objects():
        if isinstance(prop.value, GetInput):
            tosca_props[prop.name] = {'get_param': prop.value.input_name}
        else:
            if prop.name == "size":
                size_value = (ScalarUnit_Size(prop.value).
                              get_num_from_scalar_unit('GiB'))
                if size_value == 0:
                    # OpenStack Heat expects size in GB
                    msg = _('Cinder Volume Size unit should be in GB.')
                    log.error(msg)
                    raise InvalidPropertyValueError(
                        what=msg)
                elif int(size_value) < size_value:
                    # Round a fractional size up to the next whole GB.
                    size_value = int(size_value) + 1
                    log.warning(_("Cinder unit value should be in "
                                  "multiples of GBs. so corrected "
                                  " %(prop_val)s to %(size_value)s GB.") %
                                {'prop_val': prop.value,
                                 'size_value': size_value})
                tosca_props[prop.name] = int(size_value)
            else:
                tosca_props[prop.name] = prop.value
    self.properties = tosca_props
def take_action(self, parsed_args):
    """Translate the template referenced by the parsed CLI arguments.

    When --validate-only=true only parses/validates; otherwise the
    TOSCA template is translated and the result written to
    --output-file or printed to stdout.
    """
    log.debug(_('Translating the template with input parameters'
                '(%s).'), parsed_args)
    output = None
    if parsed_args.parameter:
        parsed_params = parsed_args.parameter
    else:
        parsed_params = {}
    if parsed_args.template_type == "tosca":
        path = parsed_args.template_file
        a_file = os.path.isfile(path)
        # Only treat the path as a URL when it is not a local file.
        a_url = UrlUtils.validate_url(path) if not a_file else False
        if a_file or a_url:
            validate = parsed_args.validate_only
            if validate and validate.lower() == "true":
                # Validation only: constructing the template runs all
                # the parser checks; the result is discarded.
                ToscaTemplate(path, parsed_params, a_file)
            else:
                tosca = ToscaTemplate(path, parsed_params, a_file)
                translator = TOSCATranslator(tosca, parsed_params)
                output = translator.translate()
        else:
            msg = _('Could not find template file.')
            log.error(msg)
            sys.stdout.write(msg)
            raise SystemExit
    if output:
        if parsed_args.output_file:
            with open(parsed_args.output_file, 'w+') as f:
                f.write(output)
        else:
            print(output)
def test_constraint_for_scalar_unit(self):
    """Scalar-unit property values violating constraints must fail.

    Each property in the snippet is deliberately out of bounds for its
    constraint in the custom Compute type, so validate() must raise
    ValidationError with the expected message.
    """
    tpl_snippet = '''
    server:
      type: tosca.my.nodes.Compute
      properties:
        cpu_frequency: 0.05 GHz
        disk_size: 500 MB
        mem_size: 1 MB
    '''
    nodetemplates = yamlparser.simple_parse(tpl_snippet)
    nodetemplate = NodeTemplate('server', nodetemplates, self.custom_def)
    props = nodetemplate.get_properties()
    if 'cpu_frequency' in props.keys():
        error = self.assertRaises(exception.ValidationError,
                                  props['cpu_frequency'].validate)
        self.assertEqual(_('The value "0.05 GHz" of property '
                           '"cpu_frequency" must be greater than or equal '
                           'to "0.1 GHz".'), error.__str__())
    if 'disk_size' in props.keys():
        error = self.assertRaises(exception.ValidationError,
                                  props['disk_size'].validate)
        self.assertEqual(_('The value "500 MB" of property "disk_size" '
                           'must be greater than or equal to "1 GB".'),
                         error.__str__())
    if 'mem_size' in props.keys():
        error = self.assertRaises(exception.ValidationError,
                                  props['mem_size'].validate)
        self.assertEqual(_('The value "1 MB" of property "mem_size" is '
                           'out of range "(min:1 MiB, max:1 GiB)".'),
                         error.__str__())
def test_invalid_section_names(self):
    """Each misspelled top-level section name must be reported.

    The template file contains four misspelled top-level keynames;
    parsing must fail and an UnknownFieldError message must be
    collected for every one of them.
    """
    tosca_tpl = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        "data/test_invalid_section_names.yaml")
    self.assertRaises(exception.ValidationError, ToscaTemplate,
                      tosca_tpl, None)
    err1_msg = _('Template contains unknown field '
                 '"tosca_definitions_versions". Refer to the definition '
                 'to verify valid values.')
    exception.ExceptionCollector.assertExceptionMessage(
        exception.UnknownFieldError, err1_msg)
    err2_msg = _('Template contains unknown field "descriptions". '
                 'Refer to the definition to verify valid values.')
    exception.ExceptionCollector.assertExceptionMessage(
        exception.UnknownFieldError, err2_msg)
    err3_msg = _('Template contains unknown field "import". Refer to '
                 'the definition to verify valid values.')
    exception.ExceptionCollector.assertExceptionMessage(
        exception.UnknownFieldError, err3_msg)
    err4_msg = _('Template contains unknown field "topology_templates". '
                 'Refer to the definition to verify valid values.')
    exception.ExceptionCollector.assertExceptionMessage(
        exception.UnknownFieldError, err4_msg)
def _validate_external_reference(self, tpl_file, resource_file,
                                 raise_exc=True):
    """Verify that the external resource exists

    If resource_file is a URL verify that the URL is valid.
    If resource_file is a relative path verify that the path is valid
    considering base folder (self.temp_dir) and tpl_file.
    Note that in a CSAR resource_file cannot be an absolute path.
    """
    if UrlUtils.validate_url(resource_file):
        msg = (_('The resource at "%s" cannot be accessed.') %
               resource_file)
        try:
            if UrlUtils.url_accessible(resource_file):
                return
            else:
                ExceptionCollector.appendException(
                    URLException(what=msg))
                self.error_caught = True
        except Exception:
            # Any failure reaching the URL is reported the same way.
            ExceptionCollector.appendException(
                URLException(what=msg))
            self.error_caught = True
    # NOTE(review): an inaccessible URL falls through to the file
    # check below and may collect a second "does not exist" error for
    # the same resource -- confirm this double-report is intended.
    if os.path.isfile(os.path.join(self.temp_dir,
                                   os.path.dirname(tpl_file),
                                   resource_file)):
        return
    if raise_exc:
        ExceptionCollector.appendException(
            ValueError(_('The resource "%s" does not exist.')
                       % resource_file))
        self.error_caught = True
def _find_operation_name(self, interface_name, operation_name):
    """Validate operation_name against the named TOSCA interface.

    Returns operation_name when it belongs to the interface's known
    operation set; otherwise collects a ValueError and returns None.

    NOTE: the original code called .format(GET_OPERATION_OUTPUT) on
    each error message, but none of the messages contains a
    placeholder, so the call was a no-op and has been removed.
    """
    if(interface_name == 'Configure' or
       interface_name == 'tosca.interfaces.node.relationship.Configure'):
        if(operation_name in StatefulEntityType.
           interfaces_relationship_configure_operations):
            return operation_name
        else:
            ExceptionCollector.appendException(
                ValueError(_('Enter an operation of Configure interface'
                             )))
            return
    elif(interface_name == 'Standard' or
         interface_name == 'tosca.interfaces.node.lifecycle.Standard'):
        if(operation_name in
           StatefulEntityType.interfaces_node_lifecycle_operations):
            return operation_name
        else:
            ExceptionCollector.appendException(
                ValueError(_('Enter an operation of Standard interface'
                             )))
            return
    else:
        # Unknown interface name altogether.
        ExceptionCollector.appendException(
            ValueError(_('Enter a valid operation name')))
        return
def _find_node_template(self, node_template_name):
    """Resolve a node template reference used by get_attribute.

    Handles the TARGET/SOURCE keywords (valid only in a relationship
    context) and SELF (unless the context is a list, i.e. the function
    appears in the outputs section); any other name is looked up among
    the topology's node templates.  A KeyError is collected when the
    reference cannot be resolved.
    """
    if node_template_name == TARGET:
        if not isinstance(self.context.type_definition, RelationshipType):
            ExceptionCollector.appendException(
                KeyError(_('"TARGET" keyword can only be used in context'
                           ' to "Relationships" target node')))
            return
        return self.context.target
    if node_template_name == SOURCE:
        if not isinstance(self.context.type_definition, RelationshipType):
            ExceptionCollector.appendException(
                KeyError(_('"SOURCE" keyword can only be used in context'
                           ' to "Relationships" source node')))
            return
        return self.context.source
    # SELF maps to the current template's name, except in the outputs
    # section where context is a list and SELF has no meaning.
    name = self.context.name \
        if node_template_name == SELF and \
        not isinstance(self.context, list) \
        else node_template_name
    for node_template in self.tosca_tpl.nodetemplates:
        if node_template.name == name:
            return node_template
    ExceptionCollector.appendException(
        KeyError(_(
            'Node template "{0}" was not found.'
        ).format(node_template_name)))
def _get_capability_attribute(self, node_template, capability_name,
                              attr_name):
    """Gets a node template capability attribute.

    Returns the attribute definition from the named capability, or
    None after collecting a KeyError when either the capability or the
    attribute cannot be found.
    """
    caps = node_template.get_capabilities()
    if caps and capability_name in caps.keys():
        cap = caps[capability_name]
        attribute = None
        attrs = cap.definition.get_attributes_def()
        if attrs and attr_name in attrs.keys():
            attribute = attrs[attr_name]
        if not attribute:
            ExceptionCollector.appendException(
                KeyError(_('Attribute "%(attr)s" was not found in '
                           'capability "%(cap)s" of node template '
                           '"%(ntpl1)s" referenced from node template '
                           '"%(ntpl2)s".') % {'attr': attr_name,
                                              'cap': capability_name,
                                              'ntpl1': node_template.name,
                                              'ntpl2': self.context.name}))
        return attribute
    # The named capability itself was not found on the template.
    msg = _('Requirement/Capability "{0}" referenced from node template '
            '"{1}" was not found in node template "{2}".').format(
                capability_name, self.context.name, node_template.name)
    ExceptionCollector.appendException(KeyError(msg))
def _get_capability_property(self, node_template, capability_name,
                             property_name):
    """Gets a node template capability property.

    Returns the property value from the named capability, or None after
    collecting a KeyError when the capability or property is missing.
    """
    caps = node_template.get_capabilities()
    if caps and capability_name in caps.keys():
        cap = caps[capability_name]
        props = cap.get_properties()
        # Test for key presence rather than value truthiness: the
        # original "if not property" misreported legitimately falsy
        # values (0, '', False) as not found (and shadowed the
        # builtin "property").
        if props and property_name in props.keys():
            return props[property_name].value
        ExceptionCollector.appendException(
            KeyError(_('Property "%(prop)s" was not found in '
                       'capability "%(cap)s" of node template '
                       '"%(ntpl1)s" referenced from node template '
                       '"%(ntpl2)s".') % {'prop': property_name,
                                          'cap': capability_name,
                                          'ntpl1': node_template.name,
                                          'ntpl2': self.context.name}))
        return None
    # The named capability itself was not found on the template.
    msg = _('Requirement/Capability "{0}" referenced from node template '
            '"{1}" was not found in node template "{2}".').format(
                capability_name, self.context.name, node_template.name)
    ExceptionCollector.appendException(KeyError(msg))
def validate_range(range):
    """Validate a TOSCA range: a two-item list [min, max].

    Either bound may be the keyword UNBOUNDED; bounded values must be
    numeric and min must not exceed max (min == max is allowed).
    Errors are collected via ExceptionCollector; the range is returned
    unchanged.
    """
    # list class check
    validate_list(range)
    # validate range list has a min and max
    if len(range) != 2:
        ExceptionCollector.appendException(
            ValueError(_('"%s" is not a valid range.') % range))
    # validate min and max are numerics or the keyword UNBOUNDED
    # (locals renamed so the builtins min/max are not shadowed)
    min_unbounded = max_unbounded = False
    if not range[0] == RANGE_UNBOUNDED:
        min_value = validate_numeric(range[0])
    else:
        min_unbounded = True
    if not range[1] == RANGE_UNBOUNDED:
        max_value = validate_numeric(range[1])
    else:
        max_unbounded = True
    # validate the max > min (account for UNBOUNDED)
    if not min_unbounded and not max_unbounded:
        # Note: min == max is allowed
        if min_value > max_value:
            ExceptionCollector.appendException(
                ValueError(_('"%s" is not a valid range.') % range))
    return range
def validate(self):
    """Validate the get_attribute argument list.

    Two-argument form resolves via the HOST/relationship chain; the
    longer form resolves the node template, finds the attribute on its
    type (or on a requirement/capability), then walks any remaining
    arguments as list indices, map keys or complex-type property names,
    tracking the evolving value_type.  Errors are collected via
    ExceptionCollector.
    """
    if len(self.args) < 2:
        ExceptionCollector.appendException(
            ValueError(_('Illegal arguments for function "{0}". Expected '
                         'arguments: "node-template-name", "req-or-cap"'
                         '(optional), "property name"'
                         ).format(GET_ATTRIBUTE)))
        return
    elif len(self.args) == 2:
        self._find_node_template_containing_attribute()
    else:
        node_tpl = self._find_node_template(self.args[0])
        if node_tpl is None:
            return
        # index marks where the trailing path arguments begin; it is 3
        # when args[1] names a requirement/capability rather than an
        # attribute of the node type itself.
        index = 2
        attrs = node_tpl.type_definition.get_attributes_def()
        found = [attrs[self.args[1]]] if self.args[1] in attrs else []
        if found:
            attr = found[0]
        else:
            index = 3
            # then check the req or caps
            attr = self._find_req_or_cap_attribute(self.args[1],
                                                   self.args[2])
        value_type = attr.schema['type']
        if len(self.args) > index:
            for elem in self.args[index:]:
                if value_type == "list":
                    # A list step must be an integer index.
                    if not isinstance(elem, int):
                        ExceptionCollector.appendException(
                            ValueError(_('Illegal arguments for function'
                                         ' "{0}". "{1}" Expected positive'
                                         ' integer argument'
                                         ).format(GET_ATTRIBUTE, elem)))
                    value_type = attr.schema['entry_schema']['type']
                elif value_type == "map":
                    value_type = attr.schema['entry_schema']['type']
                elif value_type in Schema.PROPERTY_TYPES:
                    # A scalar cannot be indexed into any further.
                    ExceptionCollector.appendException(
                        ValueError(_('Illegal arguments for function'
                                     ' "{0}". Unexpected attribute/'
                                     'index value "{1}"'
                                     ).format(GET_ATTRIBUTE, elem)))
                    return
                else:  # It is a complex type
                    data_type = DataType(value_type)
                    props = data_type.get_all_properties()
                    found = [props[elem]] if elem in props else []
                    if found:
                        prop = found[0]
                        value_type = prop.schema['type']
                    else:
                        ExceptionCollector.appendException(
                            KeyError(_('Illegal arguments for function'
                                       ' "{0}". Attribute name "{1}" not'
                                       ' found in "{2}"'
                                       ).format(GET_ATTRIBUTE,
                                                elem,
                                                value_type)))
def validate(self):
    """Validate the token function arguments.

    Requires at least three arguments; the second must be a
    single-character string separator and the third an integer index.
    Violations are collected via ExceptionCollector.
    """
    args = self.args
    if len(args) < 3:
        ExceptionCollector.appendException(
            ValueError(
                _('Invalid arguments for function "{0}". Expected '
                  "at least three arguments.").format(TOKEN)
            )
        )
        return
    separator = args[1]
    if not (isinstance(separator, str) and len(separator) == 1):
        ExceptionCollector.appendException(
            ValueError(
                _(
                    'Invalid arguments for function "{0}". '
                    "Expected single char value as second "
                    "argument."
                ).format(TOKEN)
            )
        )
    if not isinstance(args[2], int):
        ExceptionCollector.appendException(
            ValueError(
                _(
                    'Invalid arguments for function "{0}". '
                    "Expected integer value as third "
                    "argument."
                ).format(TOKEN)
            )
        )
def _parse_parameters(self, parameter_list):
    """Parse a --parameters=... argument into a dict of inputs.

    Parameters are semicolon-separated "name=value" assignments;
    surrounding double quotes are stripped.  Raises ValueError for a
    malformed list or parameter.
    """
    parsed_inputs = {}
    if parameter_list.startswith('--parameters'):
        # Parameters are semi-colon separated
        inputs = parameter_list.split('--parameters=')[1].\
            replace('"', '').split(';')
        # Each parameter should be an assignment
        for param in inputs:
            # Split on the first '=' only, so values may contain '='.
            keyvalue = param.split('=', 1)
            # Validate the parameter has both a name and value
            msg = _("'%(param)s' is not a well-formed parameter.") % {
                'param': param}
            # len(...) == 2 replaces the original "__len__() is 2":
            # identity comparison of ints is a CPython caching detail
            # and raises SyntaxWarning on modern Python.
            if len(keyvalue) == 2:
                # Assure parameter name is not zero-length or whitespace
                stripped_name = keyvalue[0].strip()
                if not stripped_name:
                    log.error(msg)
                    raise ValueError(msg)
                # Add the valid parameter to the dictionary
                parsed_inputs[keyvalue[0]] = keyvalue[1]
            else:
                log.error(msg)
                raise ValueError(msg)
    else:
        msg = _("'%(list)s' is not a valid parameter list.") % {
            'list': parameter_list}
        log.error(msg)
        raise ValueError(msg)
    return parsed_inputs
def __init__(self, name, value=None, schema=None):
    """Initialize with a name, optional value and schema dict.

    An InvalidSchemaError is collected when the schema is missing or
    lacks a 'type' key, or when its 'required' attribute is not a
    boolean or a recognized string value.
    """
    self.name = name
    self.value = value
    self.schema = schema
    try:
        self.schema['type']
    # TypeError covers schema=None: the original caught only KeyError,
    # so a missing schema escaped as an unhandled TypeError.
    except (KeyError, TypeError):
        msg = (_('Schema definition of "%(pname)s" must have a "type" '
                 'attribute.') % dict(pname=self.name))
        ExceptionCollector.appendException(
            InvalidSchemaError(message=msg))

    # Guard against a None schema before the membership test.
    if self.schema and 'required' in self.schema:
        required = self.schema['required']
        if not isinstance(required, bool):
            if required.lower() not in self.VALID_REQUIRED_VALUES:
                valid_values = ', '.join(self.VALID_REQUIRED_VALUES)
                msg = (_('Schema definition of "%(propname)s" has '
                         '"required" attribute with invalid value '
                         '"%(value1)s". The value must be one of '
                         '"%(value2)s".') % {"propname": self.name,
                                             "value1": required,
                                             "value2": valid_values})
                ExceptionCollector.appendException(
                    InvalidSchemaError(message=msg))
def _find_node_template(self, node_template_name):
    """Resolve a node template reference used by get_property.

    SELF returns the current context, HOST walks the hosting chain,
    and TARGET/SOURCE are valid only in a relationship context.  Any
    other name is looked up among the topology's node templates; a
    KeyError is collected when the reference cannot be resolved.
    """
    if node_template_name == SELF:
        return self.context
    # enable the HOST value in the function
    if node_template_name == HOST:
        return self._find_host_containing_property()
    if node_template_name == TARGET:
        if not isinstance(self.context.type_definition, RelationshipType):
            ExceptionCollector.appendException(
                KeyError(_('"TARGET" keyword can only be used in context'
                           ' to "Relationships" target node')))
            return
        return self.context.target
    if node_template_name == SOURCE:
        if not isinstance(self.context.type_definition, RelationshipType):
            ExceptionCollector.appendException(
                KeyError(_('"SOURCE" keyword can only be used in context'
                           ' to "Relationships" source node')))
            return
        return self.context.source
    # Some template containers have no node templates at all.
    if not hasattr(self.tosca_tpl, 'nodetemplates'):
        return
    for node_template in self.tosca_tpl.nodetemplates:
        if node_template.name == node_template_name:
            return node_template
    ExceptionCollector.appendException(
        KeyError(_(
            'Node template "{0}" was not found.'
        ).format(node_template_name)))
def _translate_inputs(self):
    """Convert TOSCA topology inputs into HOT parameters.

    Values come from parsed_params or input defaults; a missing value
    raises.  scalar-unit.size inputs are converted to whole GiB and
    rounded up (assumed to size Cinder volumes), 'version' inputs are
    normalized, and TOSCA constraints are validated then mapped to HOT
    constraints.
    """
    hot_inputs = []
    hot_default = None
    for input in self.inputs:
        hot_input_type = TOSCA_TO_HOT_INPUT_TYPES[input.type]
        if input.name in self.parsed_params:
            # Validate the user-supplied value; keep the original TOSCA
            # type for scalar-unit.size so the unit string is accepted.
            input_type = hot_input_type
            if input.type == "scalar-unit.size":
                input_type = input.type
            DataEntity.validate_datatype(input_type,
                                         self.parsed_params[input.name])
            hot_default = self.parsed_params[input.name]
        elif input.default is not None:
            hot_default = input.default
        else:
            log.warning(_("Need to specify a value "
                          "for input {0}").format(input.name))
            raise Exception(_("Need to specify a value "
                              "for input {0}").format(input.name))
        if input.type == "scalar-unit.size":
            # Assumption here is to use this scalar-unit.size for size of
            # cinder volume in heat templates and will be in GB.
            # should add logic to support other types if needed.
            input_value = hot_default
            hot_default = (ScalarUnit_Size(hot_default).
                           get_num_from_scalar_unit('GiB'))
            if hot_default == 0:
                log.warning(_('Unit value should be > 0.'))
                raise Exception(_(
                    'Unit value should be > 0.'))
            elif int(hot_default) < hot_default:
                # Round a fractional size up to the next whole GB.
                hot_default = int(hot_default) + 1
                log.warning(_("Cinder unit value should be in multiples"
                              " of GBs. So corrected %(input_value)s "
                              "to %(hot_default)s GB.") %
                            {'input_value': input_value,
                             'hot_default': hot_default})
        if input.type == 'version':
            hot_default = TOSCAVersionProperty(hot_default).get_version()
        hot_constraints = []
        if input.constraints:
            for constraint in input.constraints:
                # Validate against the (possibly converted) default
                # before translating the constraint itself.
                constraint.validate(
                    int(hot_default) if hot_input_type == "number"
                    else hot_default)
                hc, hvalue = self._translate_constraints(
                    constraint.constraint_key,
                    constraint.constraint_value)
                hot_constraints.append({hc: hvalue})
        hot_inputs.append(HotParameter(name=input.name,
                                       type=hot_input_type,
                                       description=input.description,
                                       default=hot_default,
                                       constraints=hot_constraints))
    return hot_inputs
def _translate(self, sourcetype, path, parsed_params, a_file):
    """Translate a template of the given source type into HOT output.

    Only the "tosca" source type is handled; any other type returns
    None.
    """
    if sourcetype != "tosca":
        return None
    log.debug(_('Loading the tosca template.'))
    tosca_template = ToscaTemplate(path, parsed_params, a_file)
    translator = TOSCATranslator(tosca_template, parsed_params,
                                 self.deploy)
    log.debug(_('Translating the tosca template.'))
    return translator.translate()
def _validate(self, args):
    """Check the raw CLI argument list.

    The list must be non-empty and its first entry must be the
    --template-file option; otherwise a ValueError is raised.
    """
    if not args:
        raise ValueError(
            _('The program requires a template or a CSAR file as an '
              'argument. Please refer to the usage documentation.'))
    if "--template-file=" not in args[0]:
        raise ValueError(
            _('The program expects "--template-file" as the first '
              'argument. Please refer to the usage documentation.'))
def test_csar_invalid_script_path(self):
    """A CSAR referencing missing scripts must fail validation.

    Archive entry order is not fixed, so either missing script may be
    reported first; both messages are accepted.
    """
    path = os.path.join(self.base_path,
                        "data/CSAR/csar_wordpress_invalid_script_path.zip")
    csar = CSAR(path)
    error = self.assertRaises(ValueError, csar.validate)
    self.assertTrue(
        str(error) == _('The resource Scripts/WordPress/install.sh does '
                        'not exist.') or
        str(error) == _('The resource Scripts/WordPress/configure.sh does '
                        'not exist.'))
def verify_template(self):
    """Report the outcome of template validation.

    Raises ValidationError with the collected error report when any
    exceptions were caught; otherwise logs and prints a success
    message.
    """
    if not ExceptionCollector.exceptionsCaught():
        msg = (_('The input "%(path)s" successfully passed validation.') %
               {'path': self.input_path})
        log.info(msg)
        print(msg)
        return
    report = '\n\t'.join(ExceptionCollector.getExceptionsReport())
    header = (_('\nThe input "%(path)s" failed validation with the '
                'following error(s): \n\n\t') %
              {'path': self.input_path})
    raise ValidationError(message=header + report)
def test_greater_than_validate_fail(self):
    """greater_than must reject values at or below the bound."""
    schema = {'greater_than': 4}
    constraint = Constraint('prop', Schema.INTEGER, schema)
    error = self.assertRaises(exception.ValidationError,
                              constraint.validate, 3)
    self.assertEqual(_('The value "3" of property "prop" must be greater '
                       'than "4".'), str(error))
    # The bound itself is excluded: 4 is not greater than 4.
    error = self.assertRaises(exception.ValidationError,
                              constraint.validate, 4)
    self.assertEqual(_('The value "4" of property "prop" must be greater '
                       'than "4".'), str(error))
def main(self, args):
    """Entry point: parse CLI arguments, then validate or translate.

    Expects --template-file as args[0] and --template-type as args[1];
    optional --parameters, --validate-only and --output-file may follow
    in any order.  Raises ValueError for bad arguments or paths.
    """
    # TODO(spzala): set self.deploy based on passed args once support for
    # --deploy argument is enabled.
    self.deploy = False
    self._validate(args)
    path = args[0].split("--template-file=")[1]
    # e.g. --template_file=translator/tests/data/tosca_helloworld.yaml
    template_type = args[1].split("--template-type=")[1]
    # e.g. --template_type=tosca
    if not template_type:
        msg = _("Template type is needed. For example, 'tosca'")
        # log.error(msg)
        raise ValueError(msg)
    elif template_type not in self.SUPPORTED_TYPES:
        msg = _("%(value)s is not a valid template type.") % {
            "value": template_type}
        # log.error(msg)
        raise ValueError(msg)
    parsed_params = {}
    validate_only = None
    output_file = None
    if len(args) > 2:
        parameters = None
        # Optional flags may appear anywhere in the argument list.
        for arg in args:
            if "--validate-only=" in arg:
                validate_only = arg
            if "--parameters=" in arg:
                parameters = arg
            if "--output-file=" in arg:
                output = arg
                output_file = output.split("--output-file=")[1]
        if parameters:
            parsed_params = self._parse_parameters(parameters)
    a_file = os.path.isfile(path)
    # Only treat the path as a URL when it is not a local file.
    a_url = UrlUtils.validate_url(path) if not a_file else False
    if a_file or a_url:
        run_only_validation = False
        if validate_only:
            value = validate_only.split("-validate-only=")[1].lower()
            if template_type == "tosca" and value == "true":
                run_only_validation = True
        if run_only_validation:
            # Constructing the template runs all parser checks; the
            # result is discarded.
            ToscaTemplate(path, parsed_params, a_file)
        else:
            # log.info(
            #     _('Checked whether template path is a file or url path.'))
            heat_tpl = self._translate(template_type, path, parsed_params,
                                       a_file)
            if heat_tpl:
                self._write_output(heat_tpl, output_file)
    else:
        msg = _("The path %(path)s is not a valid file or URL.") % {
            "path": path}
        # log.error(msg)
        raise ValueError(msg)
def test_tosca_version_property_invalid_qualifier(self):
    """Version strings with malformed qualifiers must be rejected."""
    version = '18.0.1-xyz'
    exp_msg = _('Value of TOSCA version property "18.0.1-xyz" is invalid.')
    err = self.assertRaises(InvalidTOSCAVersionPropertyException,
                            TOSCAVersionProperty, version)
    self.assertEqual(exp_msg, err.__str__())

    # A dot-separated qualifier must still match the expected pattern.
    version = '0.0.0.abc'
    exp_msg = _('Value of TOSCA version property "0.0.0.abc" is invalid.')
    err = self.assertRaises(InvalidTOSCAVersionPropertyException,
                            TOSCAVersionProperty, version)
    self.assertEqual(exp_msg, err.__str__())
def test_length_validate_fail(self):
    """length must reject strings shorter or longer than the bound."""
    schema = {'length': 4}
    constraint = Constraint('prop', Schema.STRING, schema)
    error = self.assertRaises(exception.ValidationError,
                              constraint.validate, 'abc')
    self.assertEqual(_('Length of value "abc" of property "prop" must '
                       'be equal to "4".'), str(error))
    error = self.assertRaises(exception.ValidationError,
                              constraint.validate,
                              'abcde')
    self.assertEqual(_('Length of value "abcde" of property "prop" must '
                       'be equal to "4".'), str(error))
def __init__(self, property_name, property_type, constraint):
    """Initialize the in_range constraint.

    The constraint value must be a two-item sequence of comparable
    bounds; InvalidSchemaError is raised otherwise.
    """
    super(InRange, self).__init__(property_name, property_type, constraint)
    # collections.abc.Sequence: the collections.Sequence alias was
    # removed in Python 3.10.
    if(not isinstance(self.constraint_value, collections.abc.Sequence) or
       (len(constraint[self.IN_RANGE]) != 2)):
        raise InvalidSchemaError(message=_('in_range must be a list.'))

    for value in self.constraint_value:
        if not isinstance(value, self.valid_types):
            raise InvalidSchemaError(_('in_range value must '
                                       'be comparable.'))

    self.min = self.constraint_value[0]
    self.max = self.constraint_value[1]
def _validate_inputs(self):
    """validate the inputs of substitution mappings.

    The inputs defined by the topology template have to match the
    properties of the node type or the substituted node. If there
    are more inputs than the substituted node has properties, default
    values must be defined for those inputs.
    """
    all_inputs = set([input.name for input in self.inputs])
    required_properties = set([p.name for p in
                               self.node_definition.
                               get_properties_def_objects()
                               if p.required and p.default is None])
    # Must provide inputs for required properties of node type.
    for property in required_properties:
        # Check property which is 'required' and has no 'default' value
        if property not in all_inputs:
            ExceptionCollector.appendException(
                MissingRequiredInputError(
                    what=_('SubstitutionMappings with node_type ')
                    + self.node_type,
                    input_name=property))
    # If the optional properties of node type need to be customized by
    # substituted node, it also is necessary to define inputs for them,
    # otherwise they are not mandatory to be defined.
    customized_parameters = set(self.sub_mapped_node_template
                                .get_properties().keys()
                                if self.sub_mapped_node_template else [])
    all_properties = set(self.node_definition.get_properties_def())
    # Any customized property without a matching input is an error.
    for parameter in customized_parameters - all_inputs:
        if parameter in all_properties:
            ExceptionCollector.appendException(
                MissingRequiredInputError(
                    what=_('SubstitutionMappings with node_type ')
                    + self.node_type,
                    input_name=parameter))
    # Additional inputs are not in the properties of node type must
    # provide default values. Currently the scenario may not happen
    # because of parameters validation in nodetemplate, here is a
    # guarantee.
    for input in self.inputs:
        if input.name in all_inputs - all_properties \
                and input.default is None:
            ExceptionCollector.appendException(
                MissingDefaultValueError(
                    what=_('SubstitutionMappings with node_type ')
                    + self.node_type,
                    input_name=input.name))
def _read_template_yaml(self, template):
    """Read and parse one YAML template out of the CSAR archive.

    Returns the parsed dict, or None (with a ValidationError
    collected) when the content is not valid YAML or not a mapping.
    """
    data = self.zfile.read(template)
    invalid_tosca_yaml_err_msg = (
        _('The file "%(template)s" in the CSAR "%(csar)s" does not '
          'contain valid YAML content.') %
        {'template': template, 'csar': self.path})
    try:
        tosca_yaml = yaml.safe_load(data)
        # isinstance instead of an exact type() check so dict
        # subclasses (e.g. from ordered-dict YAML loaders) are valid.
        if not isinstance(tosca_yaml, dict):
            ExceptionCollector.appendException(
                ValidationError(message=invalid_tosca_yaml_err_msg))
            return None
        return tosca_yaml
    except Exception:
        # Any parser failure is reported as invalid YAML content so
        # CSAR validation can continue collecting errors.
        ExceptionCollector.appendException(
            ValidationError(message=invalid_tosca_yaml_err_msg))
        return None
def _best_flavor(self, properties): log.info(_('Choosing the best flavor for given attributes.')) # Check whether user exported all required environment variables. flavors = FLAVORS if self._check_for_env_variables(): resp = self._create_nova_flavor_dict() if resp: flavors = resp # start with all flavors match_all = flavors.keys() # TODO(anyone): Handle the case where the value contains something like # get_input instead of a value. # flavors that fit the CPU count cpu = properties.get(self.NUM_CPUS) if cpu is None: self._log_compute_msg(self.NUM_CPUS, 'flavor') match_cpu = self._match_flavors(match_all, flavors, self.NUM_CPUS, cpu) # flavors that fit the mem size mem = properties.get(self.MEM_SIZE) if mem: mem = utils.MemoryUnit.convert_unit_size_to_num(mem, 'MB') else: self._log_compute_msg(self.MEM_SIZE, 'flavor') match_cpu_mem = self._match_flavors(match_cpu, flavors, self.MEM_SIZE, mem) # flavors that fit the disk size disk = properties.get(self.DISK_SIZE) if disk: disk = utils.MemoryUnit.\ convert_unit_size_to_num(disk, 'GB') else: self._log_compute_msg(self.DISK_SIZE, 'flavor') match_cpu_mem_disk = self._match_flavors(match_cpu_mem, flavors, self.DISK_SIZE, disk) # if multiple match, pick the flavor with the least memory # the selection can be based on other heuristic, e.g. pick one with the # least total resource if len(match_cpu_mem_disk) > 1: return self._least_flavor(match_cpu_mem_disk, flavors, 'mem_size') elif len(match_cpu_mem_disk) == 1: return match_cpu_mem_disk[0] else: return None
def __init__(self, version):
    """Parse and validate a TOSCA version string.

    Collects InvalidTOSCAVersionPropertyException and stops when the
    string does not match VERSION_RE.  A value of 0/0.0/0.0.0 is
    treated as "not provided" (version becomes None) but its parsed
    components are still recorded and validated.
    """
    self.version = str(version)
    match = self.VERSION_RE.match(self.version)
    if not match:
        ExceptionCollector.appendException(
            InvalidTOSCAVersionPropertyException(what=(self.version)))
        return
    ver = match.groupdict()
    if self.version in ['0', '0.0', '0.0.0']:
        log.warning(_('Version assumed as not provided'))
        self.version = None
    self.minor_version = ver['minor_version']
    self.major_version = ver['major_version']
    self.fix_version = ver['fix_version']
    self.qualifier = self._validate_qualifier(ver['qualifier'])
    self.build_version = self._validate_build(ver['build_version'])
    self._validate_major_version(self.major_version)
def _tosca_blockstore_test(self, tpl_snippet, expectedprops):
    """Translate a block-storage node template snippet and compare the
    resulting HOT properties against the expected ones.

    The original wrapped everything in a try/except that only
    re-raised ("for time being rethrowing"); that no-op handler has
    been removed.
    """
    nodetemplates = (toscaparser.utils.yamlparser.simple_parse(tpl_snippet)
                     ['node_templates'])
    name = list(nodetemplates.keys())[0]
    nodetemplate = NodeTemplate(name, nodetemplates)
    tosca_block_store = ToscaBlockStorage(nodetemplate)
    tosca_block_store.handle_properties()
    if not self._compare_properties(tosca_block_store.properties,
                                    expectedprops):
        raise Exception(
            _("Hot Properties are not"
              " same as expected properties"))
def test_missing_required_keyname(self):
    """substitution_mappings lacking node_type must raise
    MissingRequiredFieldError with the expected message."""
    tpl_snippet = '''
    substitution_mappings:
      capabilities:
        database_endpoint: [ db_app, database_endpoint ]
      requirements:
        receiver1: [ tran_app, receiver1 ]
    '''
    sub_mappings = (toscaparser.utils.yamlparser.
                    simple_parse(tpl_snippet))['substitution_mappings']
    expected_message = _('SubstitutionMappings used in topology_template '
                         'is missing required field "node_type".')
    err = self.assertRaises(
        exception.MissingRequiredFieldError,
        lambda: SubstitutionMappings(sub_mappings,
                                     None, None, None, None, None))
    self.assertEqual(expected_message, err.__str__())
def test_node_template_with_wrong_properties_keyname(self):
    """Node template keyname 'properties' given as 'propertiessss'.

    The unknown keyname must be rejected with an UnknownFieldError
    naming the offending field.
    """
    tpl_snippet = '''
    node_templates:
      mysql_dbms:
        type: tosca.nodes.DBMS
        propertiessss:
          root_password: aaa
          port: 3376
    '''
    expectedmessage = _('Node template "mysql_dbms" contains unknown '
                        'field "propertiessss". Refer to the definition '
                        'to verify valid values.')
    err = self.assertRaises(
        exception.UnknownFieldError,
        lambda: self._single_node_template_content_test(tpl_snippet))
    self.assertEqual(expectedmessage, err.__str__())
def validate_timestamp(value):
    """Validate that value parses as a timestamp.

    A ValueError wrapping the parser's message is collected via
    ExceptionCollector when parsing fails; the function always
    returns None.
    """
    try:
        # Note: we must return our own exception message
        # as dateutil's parser returns different types / values on
        # different systems. OSX, for example, returns a tuple
        # containing a different error message than Linux
        dateutil.parser.parse(value)
    except Exception as e:
        original_err_msg = str(e)
        log.error(original_err_msg)
        ExceptionCollector.appendException(
            ValueError(
                _('"%(val)s" is not a valid timestamp. "%(msg)s"') % {
                    'val': value, 'msg': original_err_msg
                }))
    return
def test_relationship_template_properties(self):
    """A relationship template missing a required property must fail."""
    tpl_snippet = '''
    relationship_templates:
      storage_attachto:
        type: AttachesTo
        properties:
          device: test_device
    '''
    expectedmessage = _('"properties" of template "storage_attachto" is '
                        'missing required field "[\'location\']".')
    parsed = toscaparser.utils.yamlparser.simple_parse(tpl_snippet)
    templates = parsed['relationship_templates']
    tpl_name = next(iter(templates))
    rel_template = RelationshipTemplate(templates[tpl_name], tpl_name)
    err = self.assertRaises(exception.MissingRequiredFieldError,
                            rel_template.validate)
    self.assertEqual(expectedmessage, six.text_type(err))
def get_hot_attribute(self, attribute, args):
    """Map a TOSCA node template attribute to a HOT ``get_attr`` form.

    :param attribute: TOSCA attribute name being requested
    :param args: extra get_attribute arguments (currently unused here)
    :return: dict with a ``get_attr`` entry for address attributes,
             otherwise an empty dict
    """
    attr = {}
    # Convert from a TOSCA attribute for a nodetemplate to a HOT
    # attribute for the matching resource. Unless there is additional
    # runtime support, this should be a one to one mapping.

    # Note: We treat private and public IP addresses equally, but
    # this will change in the future when TOSCA starts to support
    # multiple private/public IP addresses.
    # Fix: the original message used a backslash continuation *inside*
    # the string literal, which baked a run of indentation spaces and
    # the typo "attriute" into the logged text.
    log.debug(_('Converting TOSCA attribute for a nodetemplate to a '
                'HOT attribute.'))
    if attribute == 'private_address' or \
            attribute == 'public_address':
        attr['get_attr'] = [self.name, 'networks', 'private', 0]

    return attr
def __init__(self, name, value=None, schema=None):
    """Initialize a property and validate its schema has a 'type' key.

    :param name: property name
    :param value: assigned value, if any
    :param schema: schema definition dict; must contain a 'type' key
    """
    self.name = name
    self.value = value
    self.schema = schema
    self._status = self.PROPERTY_STATUS_DEFAULT
    self._required = self.PROPERTY_REQUIRED_DEFAULT

    # Validate required 'type' property exists.
    # TypeError is caught as well: with the declared default
    # schema=None, subscripting raised an uncaught
    # "'NoneType' object is not subscriptable" instead of reporting
    # an invalid schema through the collector.
    try:
        self.schema['type']
    except (KeyError, TypeError):
        msg = (_('Schema definition of "%(pname)s" must have a "type" '
                 'attribute.') % dict(pname=self.name))
        ExceptionCollector.appendException(InvalidSchemaError(message=msg))

    if self.schema:
        self._load_required_attr_from_schema()
        self._load_status_attr_from_schema()
def test_invalid_nodetype(self):
    """An unknown node_type in substitution mappings must be rejected."""
    tpl_snippet = '''
    substitution_mappings:
      node_type: example.DatabaseSubsystem1
      capabilities:
        database_endpoint: [ db_app, database_endpoint ]
      requirements:
        receiver1: [ tran_app, receiver1 ]
    '''
    parsed = toscaparser.utils.yamlparser.simple_parse(tpl_snippet)
    sub_mappings = parsed['substitution_mappings']
    custom_defs = self._get_custom_types()
    err = self.assertRaises(
        exception.InvalidNodeTypeError,
        lambda: SubstitutionMappings(
            sub_mappings, None, None, None, None, custom_defs))
    self.assertEqual(
        _('Node type "example.DatabaseSubsystem1" '
          'is not a valid type.'),
        str(err))
def test_node_template_with_wrong_artifacts_keyname(self):
    """Node template keyname 'artifacts' given as 'artifactsss'."""
    tpl_snippet = '''
    node_templates:
      mysql_database:
        type: tosca.nodes.Database
        artifactsss:
          db_content:
            implementation: files/my_db_content.txt
            type: tosca.artifacts.File
    '''
    expected = _('Node template "mysql_database" contains unknown '
                 'field "artifactsss". Refer to the definition to '
                 'verify valid values.')
    raised = self.assertRaises(
        exception.UnknownFieldError,
        lambda: self._single_node_template_content_test(tpl_snippet))
    self.assertEqual(expected, str(raised))
def test_inputs(self):
    """An input with the misspelled keyname 'constraint' must fail."""
    tpl_snippet = '''
    inputs:
      cpus:
        type: integer
        description: Number of CPUs for the server.
        constraint:
          - valid_values: [ 1, 2, 4, 8 ]
    '''
    parsed = toscaparser.utils.yamlparser.simple_parse(tpl_snippet)
    input_name, input_attrs = list(parsed['inputs'].items())[0]
    tpl_input = Input(input_name, input_attrs)
    err = self.assertRaises(exception.UnknownFieldError,
                            tpl_input.validate)
    self.assertEqual(
        _('Input "cpus" contains unknown field "constraint". '
          'Refer to the definition to verify valid values.'),
        str(err))
def test_node_template_requirements_with_wrong_node_keyname(self):
    """Node template requirements keyname 'node' given as 'nodes'."""
    tpl_snippet = '''
    node_templates:
      mysql_database:
        type: tosca.nodes.Database
        requirements:
          - host:
              nodes: mysql_dbms
    '''
    expected = _('"requirements" of template "mysql_database" '
                 'contains unknown field "nodes". Refer to the '
                 'definition to verify valid values.')
    raised = self.assertRaises(
        exception.UnknownFieldError,
        lambda: self._single_node_template_content_test(tpl_snippet))
    self.assertEqual(expected, str(raised))
def __init__(self, importslist, path, type_definition_list=None,
             tpl=None):
    """Prepare the loader state and immediately process the imports.

    :param importslist: value of the template's "imports" keyname
    :param path: path of the importing template, if file-based
    :param type_definition_list: section name(s) to extract from imports
    :param tpl: the parsed template dict (used for repositories)
    """
    self.importslist = importslist
    self.custom_defs = {}
    # At least one of a file path or an in-memory template is required.
    if not path and not tpl:
        msg = _('Input tosca template is not provided.')
        log.warning(msg)
        ExceptionCollector.appendException(ValidationError(message=msg))
    self.path = path

    self.repositories = {}
    if tpl and tpl.get('repositories'):
        self.repositories = tpl.get('repositories')

    # Normalize the requested sections into a list.
    if not type_definition_list:
        self.type_definition_list = []
    elif isinstance(type_definition_list, list):
        self.type_definition_list = type_definition_list
    else:
        self.type_definition_list = [type_definition_list]

    self._validate_and_load_imports()
def _inputs(self):
    """Instantiate and validate the template's inputs.

    Values supplied via parsed_params are validated directly; otherwise
    the declared default (if any) is validated. A required input with
    neither a supplied value nor a default is logged as a warning.

    :return: list of validated Input objects
    """
    inputs = []
    # Bug fix: the original condition
    #   (self.parsed_params and name not in self.parsed_params
    #    or self.parsed_params is None)
    # evaluated False for an *empty* parsed_params dict, so missing
    # required inputs were never warned about in that case. Normalizing
    # to a dict (same pattern as the sibling implementation) fixes it.
    parsed_params = self.parsed_params or {}
    for name, attrs in self._tpl_inputs().items():
        input = Input(name, attrs)
        if name in parsed_params:
            input.validate(parsed_params[name])
        else:
            default = input.default
            if default:
                input.validate(default)
            if input.required and input.default is None:
                log.warning(_('The required parameter %s '
                              'is not provided') % input.name)
        inputs.append(input)
    return inputs
def __init__(self, nodetemplate, name=None, type=None, properties=None,
             metadata=None, depends_on=None,
             update_policy=None, deletion_policy=None):
    """Wrap a TOSCA node template as a HOT resource.

    :param nodetemplate: source TOSCA node template being translated
    :param name: HOT resource name; defaults to the node template's name
    :param type: HOT resource type string (e.g. 'OS::Heat::SoftwareConfig')
    :param properties: initial HOT properties dict (used by reference)
    :param metadata: optional HOT metadata section
    :param depends_on: resources this resource depends on in HOT terms
    :param update_policy: HOT update policy, stored verbatim
    :param deletion_policy: HOT deletion policy, stored verbatim
    """
    log.debug(_('Translating TOSCA node type to HOT resource type.'))
    self.nodetemplate = nodetemplate
    if name:
        self.name = name
    else:
        self.name = nodetemplate.name
    self.type = type
    self.properties = properties or {}

    # special case for HOT softwareconfig
    if type == 'OS::Heat::SoftwareConfig':
        self.properties['group'] = 'script'

    self.metadata = metadata

    # The difference between depends_on and depends_on_nodes is
    # that depends_on defines dependency in the context of the
    # HOT template and it is used during the template output.
    # Depends_on_nodes defines the direct dependency between the
    # tosca nodes and is not used during the output of the
    # HOT template but for internal processing only. When a tosca
    # node depends on another node it will be always added to
    # depends_on_nodes but not always to depends_on. For example
    # if the source of dependency is a server, the dependency will
    # be added as properties.get_resource and not depends_on
    if depends_on:
        # NOTE(review): both attributes alias the SAME list object here;
        # appending to one silently changes the other. That may be the
        # intent given the comment above — confirm before mutating
        # either list independently.
        self.depends_on = depends_on
        self.depends_on_nodes = depends_on
    else:
        self.depends_on = []
        self.depends_on_nodes = []

    self.update_policy = update_policy
    self.deletion_policy = deletion_policy
    # Grouping info filled in later during translation.
    self.group_dependencies = {}
    # if hide_resource is set to true, then this resource will not be
    # generated in the output yaml.
    self.hide_resource = False
def _groups(self):
    """Build Group objects from the template's group definitions.

    Member lists must be non-empty and free of duplicates; invalid
    lists are reported through the ExceptionCollector and the group is
    created without member nodes.

    :return: list of Group objects
    """
    groups = []
    for group_name, group_tpl in self._tpl_groups().items():
        # Bug fix: member_nodes was initialized once *outside* the
        # loop, so a group whose member list failed validation picked
        # up the stale member nodes of the previous group. Reset it per
        # iteration.
        member_nodes = None
        member_names = group_tpl.get('members')
        if member_names is not None:
            DataEntity.validate_datatype('list', member_names)
            if len(member_names) < 1 or \
                    len(member_names) != len(set(member_names)):
                exception.ExceptionCollector.appendException(
                    exception.InvalidGroupTargetException(
                        message=_('Member nodes "%s" should be >= 1 '
                                  'and not repeated') % member_names))
            else:
                member_nodes = self._get_group_members(member_names)
        group = Group(group_name, group_tpl,
                      member_nodes,
                      self.custom_defs)
        groups.append(group)
    return groups
def deploy_on_heat(self, session, auth, template,
                   stack_name, file_name, parameters):
    """Deploy the translated HOT template as a Heat stack.

    :param session: keystone session for the orchestration endpoint
    :param auth: auth plugin used to resolve the endpoint
    :param template: HOT template as a YAML string
    :param stack_name: explicit stack name; a unique one is generated
                       from *file_name* when not given
    :param file_name: source file name used in the generated stack name
    :param parameters: stack input parameters
    """
    endpoint = auth.get_endpoint(session, service_type="orchestration")
    heat_client = heatclient.client.Client('1',
                                           session=session,
                                           auth=auth,
                                           endpoint=endpoint)

    heat_stack_name = stack_name if stack_name else \
        'heat_' + file_name + '_' + str(uuid.uuid4()).split("-")[0]
    msg = _('Deploy the generated template, the stack name is %(name)s.')\
        % {'name': heat_stack_name}
    log.debug(msg)

    # safe_load: plain yaml.load without an explicit Loader can
    # construct arbitrary Python objects from the template text.
    tpl = yaml.safe_load(template)
    # Heat requires the version as a string, not a parsed date.
    tpl['heat_template_version'] = str(tpl['heat_template_version'])
    self._create_stack(heat_client=heat_client,
                       stack_name=heat_stack_name,
                       template=tpl,
                       parameters=parameters)
def test_invalid_keyname(self):
    """An unrecognized keyname in substitution mappings must fail."""
    tpl_snippet = '''
    substitution_mappings:
      node_type: example.DatabaseSubsystem
      capabilities:
        database_endpoint: [ db_app, database_endpoint ]
      requirements:
        receiver1: [ tran_app, receiver1 ]
      invalid_key: 123
    '''
    parsed = toscaparser.utils.yamlparser.simple_parse(tpl_snippet)
    sub_mappings = parsed['substitution_mappings']
    err = self.assertRaises(
        exception.UnknownFieldError,
        lambda: SubstitutionMappings(
            sub_mappings, None, None, None, None, None))
    self.assertEqual(
        _('SubstitutionMappings contains unknown field '
          '"invalid_key". Refer to the definition '
          'to verify valid values.'),
        str(err))
def get_main_template_yaml(self):
    """Read and parse the CSAR's main template as YAML.

    :return: parsed template dict, or None when the main template is
             missing or its content is not a YAML mapping (the error is
             reported through the ExceptionCollector)
    """
    main_template = self.get_main_template()
    if main_template:
        data = self.zfile.read(main_template)
        invalid_tosca_yaml_err_msg = (
            _('The file "%(template)s" in the CSAR "%(csar)s" does not '
              'contain valid TOSCA YAML content.') % {
                  'template': main_template, 'csar': self.path})
        try:
            # safe_load: CSAR content is untrusted input; yaml.load
            # without an explicit Loader can construct arbitrary
            # Python objects.
            tosca_yaml = yaml.safe_load(data)
            if type(tosca_yaml) is not dict:
                ExceptionCollector.appendException(
                    ValidationError(message=invalid_tosca_yaml_err_msg))
            return tosca_yaml
        except Exception:
            ExceptionCollector.appendException(
                ValidationError(message=invalid_tosca_yaml_err_msg))
class TOSCAException(Exception):
    '''Base exception class for TOSCA

    To correctly use this class, inherit from it and define
    a 'msg_fmt' property.
    '''

    # When True, a KeyError raised while formatting msg_fmt re-raises
    # instead of being logged and swallowed (see
    # set_fatal_format_exception).
    _FATAL_EXCEPTION_FORMAT_ERRORS = False

    # Fallback message kept when formatting fails.
    message = _('An unknown exception occurred.')

    def __init__(self, **kwargs):
        # Format the subclass-provided msg_fmt with the caller's kwargs.
        try:
            self.message = self.msg_fmt % kwargs
        except KeyError:
            # A format key was missing: log it and keep the default
            # message, unless fatal-format mode is enabled.
            exc_info = sys.exc_info()
            log.exception('Exception in string format operation: %s'
                          % exc_info[1])
            if TOSCAException._FATAL_EXCEPTION_FORMAT_ERRORS:
                # NOTE(review): this raises the exception *class*,
                # discarding the original instance and traceback; a bare
                # 'raise' would preserve them — confirm intent.
                raise exc_info[0]

    def __str__(self):
        return self.message

    # NOTE(review): declared @staticmethod yet takes 'self' and reads
    # self.name — it only works when invoked on the class with a
    # property-like object passed explicitly as the first argument.
    # Verify all call sites before changing the decorator.
    @staticmethod
    def generate_inv_schema_property_error(self, attr, value, valid_values):
        msg = (_('Schema definition of "%(propname)s" has '
                 '"%(attr)s" attribute with invalid value '
                 '"%(value1)s". The value must be one of '
                 '"%(value2)s".') % {"propname": self.name,
                                     "attr": attr,
                                     "value1": value,
                                     "value2": valid_values})
        ExceptionCollector.appendException(InvalidSchemaError(message=msg))

    @staticmethod
    def set_fatal_format_exception(flag):
        # Only genuine booleans are accepted; other values are ignored.
        if isinstance(flag, bool):
            TOSCAException._FATAL_EXCEPTION_FORMAT_ERRORS = flag
def __init__(self, **kwargs):
    """Format the exception message with an optional node/type prefix.

    Builds '<node>(<type>): ' from the class-level _node/_type markers
    and prepends it to msg_fmt % kwargs.
    """
    try:
        msg_prefix = ''
        # NOTE(review): the '(type)' suffix and the trailing ': ' are
        # only appended when _node is set — confirm a bare _type is
        # intentionally ignored.
        if TOSCAException._node:
            msg_prefix = "{}".format(TOSCAException._node)
            if TOSCAException._type:
                msg_prefix = msg_prefix + '({})'.format(
                    TOSCAException._type)
            msg_prefix = msg_prefix + ': '
        self.message = msg_prefix + self.msg_fmt % kwargs
    except KeyError:
        # A format key was missing: log and keep the default message,
        # unless fatal-format mode is enabled.
        exc_info = sys.exc_info()
        log.exception(
            _('Exception in string format operation: %s') % exc_info[1])
        if TOSCAException._FATAL_EXCEPTION_FORMAT_ERRORS:
            # NOTE(review): raises the exception class, losing the
            # original traceback — confirm intent.
            raise exc_info[0]
def get_parser(self, argv):
    """Build the argparse parser for the heat-translator CLI.

    :param argv: raw argument vector, forwarded to the identity-args
                 helper
    :return: configured argparse.ArgumentParser
    """
    parser = argparse.ArgumentParser(prog="heat-translator")

    parser.add_argument('--template-file',
                        metavar='<filename>',
                        required=True,
                        help=_('Template file to load.'))

    # Fix: help text previously said the output "will be printed to
    # stdin" — output goes to stdout.
    parser.add_argument('--output-file',
                        metavar='<filename>',
                        help=_('Where to store the output file. If not '
                               'passed, it will be printed to stdout.'))

    parser.add_argument('--template-type',
                        metavar='<input-template-type>',
                        choices=self.SUPPORTED_TYPES,
                        default='tosca',
                        help=(_('Template type to parse. Choose between '
                                '%s.') % self.SUPPORTED_TYPES))

    parser.add_argument('--parameters',
                        metavar='<param1=val1;param2=val2;...>',
                        help=_('Optional input parameters.'))

    parser.add_argument('--validate-only',
                        action='store_true',
                        default=False,
                        help=_('Only validate input template, do not '
                               'perform translation.'))

    parser.add_argument('--deploy',
                        action='store_true',
                        default=False,
                        help=_('Whether to deploy the generated template '
                               'or not.'))

    parser.add_argument('--stack-name',
                        metavar='<stack-name>',
                        required=False,
                        help=_('The name to use for the Heat stack when '
                               'deploy the generated template.'))

    self._append_global_identity_args(parser, argv)

    return parser
def test_csar_invalid_import_url(self):
    """A CSAR importing an unreachable URL must fail validation cleanly.

    The server-generated part of the error message differs across
    systems, so only the stable prefix is checked with assertIn.
    """
    path = os.path.join(self.base_path,
                        "data/CSAR/csar_wordpress_invalid_import_url.zip")
    invalid_file = (
        "https://raw.githubusercontent.com/openstack/tosca-parser/"
        "master/toscaparser/tests/data/CSAR/"
        "tosca_single_instance_wordpress/Definitions/wordpress1.yaml")
    csar = CSAR(path)
    error = self.assertRaises(URLException, csar.validate)
    # Translate first, then format: wrapping an already-formatted string
    # in _() looks up the substituted text in the catalog, which can
    # never match. (Also removed a stale commented-out assertion that
    # pinned the full server message.)
    self.assertIn(
        _('Request error "{path}": Reason is 404 Client Error').format(
            path=invalid_file),
        str(error))
    # Validation failure must not leave the extracted temp dir behind.
    self.assertTrue(csar.temp_dir is None or
                    not os.path.exists(csar.temp_dir))
def _inputs(self):
    """Instantiate and validate every input declared by the template.

    Supplied parameter values are validated directly; otherwise the
    declared default (if any) is validated. A required input with
    neither a value nor a default raises MissingRequiredInputError via
    the collector.
    """
    supplied = self.parsed_params or {}
    result = []
    for input_name, input_attrs in self._tpl_inputs().items():
        tpl_input = Input(input_name, input_attrs)
        if input_name in supplied:
            tpl_input.validate(supplied[input_name])
        else:
            if tpl_input.default:
                tpl_input.validate(tpl_input.default)
            if tpl_input.required and tpl_input.default is None:
                exception.ExceptionCollector.appendException(
                    exception.MissingRequiredInputError(
                        what=_('Topology template'),
                        input_name=tpl_input.name))
        result.append(tpl_input)
    return result
def _populate_image_dict(self):
    '''Populates and returns the images dict using Glance ReST API.

    Returns None when Keystone access or the Glance endpoint is
    unavailable; on any other OpenStack error, logs a warning and
    returns whatever was collected so far.
    '''
    images_dict = {}
    try:
        access_dict = translator.common.utils.get_ks_access_dict()
        access_token = translator.common.utils.get_token_id(access_dict)
        if access_token is None:
            return None
        glance_url = translator.common.utils.get_url_for(access_dict,
                                                         'image')
        if not glance_url:
            return None
        glance_response = requests.get(glance_url + '/v2/images',
                                       headers={'X-Auth-Token':
                                                access_token})
        if glance_response.status_code != 200:
            return None
        images = json.loads(glance_response.content)["images"]
        for image in images:
            image_resp = requests.get(glance_url + '/v2/images/' +
                                      image["id"],
                                      headers={'X-Auth-Token':
                                               access_token})
            if image_resp.status_code != 200:
                continue
            metadata = ["architecture", "type", "distribution", "version"]
            image_data = json.loads(image_resp.content)
            if any(key in image_data for key in metadata):
                # Keep only the metadata keys the image actually has.
                images_dict[image_data["name"]] = {
                    key: image_data[key]
                    for key in metadata if key in image_data
                }
    except Exception as e:
        # Handles any exception coming from openstack.
        # log.warn() is a deprecated alias; warning() is canonical.
        log.warning(
            _('Choosing predefined flavors since received '
              'Openstack Exception: %s') % str(e))
    return images_dict
def _validate_template(self, template_data, template):
    """Validate external references of every node template.

    Walks node templates in *template_data* and checks each artifact
    file and each interface operation implementation via
    _validate_external_reference.

    :param template_data: parsed template dict
    :param template: path/name of the template being validated
    """
    # 'or {}' guards against sections that are present but empty/None.
    topology_template = template_data.get('topology_template') or {}
    node_templates = topology_template.get('node_templates') or {}
    for node_template in node_templates.values():
        self._validate_node_artifacts(template, node_template)
        self._validate_node_interfaces(template, node_template)

def _validate_node_artifacts(self, template, node_template):
    """Check every artifact's external file reference."""
    artifacts = node_template.get('artifacts') or {}
    for artifact_key, artifact in artifacts.items():
        if isinstance(artifact, six.string_types):
            self._validate_external_reference(template, artifact)
        elif isinstance(artifact, dict):
            if 'file' in artifact:
                self._validate_external_reference(
                    template, artifact['file'])
        else:
            ExceptionCollector.appendException(
                ValueError(_('Unexpected artifact '
                             'definition for "%s".')
                           % artifact_key))

def _validate_node_interfaces(self, template, node_template):
    """Check every interface operation's implementation reference."""
    interfaces = node_template.get('interfaces') or {}
    for interface in interfaces.values():
        # (fixed local typo 'opertation_key' from the original)
        for operation in interface.values():
            if isinstance(operation, six.string_types):
                self._validate_external_reference(
                    template, operation, False)
            elif isinstance(operation, dict):
                if 'implementation' in operation:
                    self._validate_external_reference(
                        template, operation['implementation'])
def _create_connect_config(self, source_node, target_name,
                           connect_interface):
    """Create the SoftwareConfig resources for a ConnectsTo relationship.

    :param source_node: TOSCA node on the source side of the relation
    :param target_name: name of the target TOSCA node
    :param connect_interface: the relationship's interface definition
    :return: list of HOT resources implementing the connection
    :raises Exception: when neither pre_configure_target nor
                       pre_configure_source is defined
    """
    connectsto_resources = []
    target_node = self._find_tosca_node(target_name)
    # the configuration can occur on the source or the target
    connect_config = connect_interface.get('pre_configure_target')
    if connect_config is not None:
        config_location = 'target'
    else:
        connect_config = connect_interface.get('pre_configure_source')
        if connect_config is not None:
            config_location = 'source'
        else:
            # Bug fix: the format string used "{1}" with a single
            # positional argument, so building this error message
            # raised IndexError instead of reporting the problem.
            msg = _("Template error: "
                    "no configuration found for ConnectsTo "
                    "in {0}").format(self.nodetemplate.name)
            log.error(msg)
            raise Exception(msg)
    config_name = source_node.name + '_' + target_name + '_connect_config'
    implement = connect_config.get('implementation')
    # The two original branches were identical except for the node
    # passed to HotResource; select it once instead.
    config_node = target_node if config_location == 'target' \
        else source_node
    hot_config = HotResource(config_node,
                             config_name,
                             'OS::Heat::SoftwareConfig',
                             {'config': {'get_file': implement}})
    connectsto_resources.append(hot_config)
    hot_target = self._find_hot_resource_for_tosca(target_name)
    hot_source = self._find_hot_resource_for_tosca(source_node.name)
    connectsto_resources.append(
        hot_config.handle_connectsto(source_node, target_node,
                                     hot_source, hot_target,
                                     config_location,
                                     connect_interface))
    return connectsto_resources
def test_validation_in_nested_datatype(self):
    """A typo inside a nested datatype entry must raise UnknownFieldError."""
    value_snippet = '''
    name: Mike
    gender: male
    contacts:
      - {contact_name: Tom, contact_email: [email protected], contact_pone: '123456789'}
      - {contact_name: Jerry, contact_email: [email protected], contact_phone: '321654987'}
    '''
    parsed_value = yamlparser.simple_parse(value_snippet)
    data = DataEntity('tosca.my.datatypes.People', parsed_value,
                      DataTypeTest.custom_type_def)
    error = self.assertRaises(exception.UnknownFieldError, data.validate)
    self.assertEqual(
        _('Data value of type '
          '"tosca.my.datatypes.ContactInfo" contains unknown '
          'field "contact_pone". Refer to the definition to '
          'verify valid values.'),
        str(error))