def handle_properties(self):
    """Map TOSCA object-store properties onto Swift container properties.

    Reads the node template's properties and builds ``self.properties``
    with the container ``name``, an optional byte quota, and world-read
    access.
    """
    tosca_props = self._get_tosca_props(
        self.nodetemplate.get_properties_objects())
    objectstore_props = {}

    if "name" in tosca_props:
        objectstore_props["name"] = tosca_props["name"]

    # Heat cannot grow a container quota dynamically.  If both "size"
    # and "maxsize" are defined in the TOSCA template, "maxsize" wins.
    if "size" in tosca_props or "maxsize" in tosca_props:
        quota_size = tosca_props.get("maxsize", tosca_props.get("size"))
        objectstore_props["X-Container-Meta"] = {
            "Quota-Bytes":
                ScalarUnit_Size(quota_size).get_num_from_scalar_unit()}

    objectstore_props["X-Container-Read"] = '".r:*"'
    self.properties = objectstore_props
def handle_properties(self):
    """Translate TOSCA node properties into Heat Cinder volume properties.

    ``get_input`` references become ``{'get_param': ...}``; a ``size``
    property is normalised to a whole number of GiB (rounded up), and
    every other property is copied through unchanged.
    """
    tosca_props = {}
    for prop in self.nodetemplate.get_properties_objects():
        # Defer function expressions to Heat as template parameters.
        if isinstance(prop.value, GetInput):
            tosca_props[prop.name] = {'get_param': prop.value.input_name}
            continue

        if prop.name != "size":
            tosca_props[prop.name] = prop.value
            continue

        gib_value = (ScalarUnit_Size(
            prop.value).get_num_from_scalar_unit('GiB'))
        if gib_value == 0:
            # OpenStack Heat expects size in GB
            msg = _('Cinder Volume Size unit should be in GB.')
            log.error(msg)
            raise InvalidPropertyValueError(what=msg)
        if int(gib_value) < gib_value:
            # Round a fractional GiB value up to the next whole unit.
            gib_value = int(gib_value) + 1
            log.warning(
                _("Cinder unit value should be in "
                  "multiples of GBs. so corrected "
                  " %(prop_val)s to %(size_value)s GB.") % {
                    'prop_val': prop.value,
                    'size_value': gib_value
                })
        tosca_props[prop.name] = int(gib_value)
    self.properties = tosca_props
def _translate_inputs(self):
    """Convert TOSCA template inputs into HOT parameters.

    For each input, resolves a default from ``self.parsed_params`` or
    the input's own default, normalises ``scalar-unit.size`` values to
    whole GiB (rounded up) and ``version`` values to canonical form,
    translates constraints, and returns a list of HotParameter objects.

    Raises:
        Exception: when deploying and an input has no usable value, or
            when a size input resolves to zero.
    """
    hot_inputs = []
    log.info(_('Translating TOSCA input type to HOT input type.'))
    for input in self.inputs:
        hot_default = None
        hot_input_type = TOSCA_TO_HOT_INPUT_TYPES[input.type]
        if input.name in self.parsed_params:
            hot_default = DataEntity.validate_datatype(
                input.type, self.parsed_params[input.name])
        elif input.default is not None:
            hot_default = DataEntity.validate_datatype(
                input.type, input.default)
        elif self.deploy:
            # Deployment requires a concrete value for every input.
            msg = _("Need to specify a value "
                    "for input {0}.").format(input.name)
            log.error(msg)
            raise Exception(msg)
        # Guard on None: with no parsed param, no default and deploy
        # disabled, hot_default stays None and must not be fed to the
        # unit/version converters below (previously a crash).
        if input.type == "scalar-unit.size" and hot_default is not None:
            # Assumption here is to use this scalar-unit.size for size of
            # cinder volume in heat templates and will be in GB.
            # should add logic to support other types if needed.
            input_value = hot_default
            hot_default = (ScalarUnit_Size(
                hot_default).get_num_from_scalar_unit('GiB'))
            if hot_default == 0:
                msg = _('Unit value should be > 0.')
                log.error(msg)
                raise Exception(msg)
            elif int(hot_default) < hot_default:
                # Round a fractional GiB value up to the next whole unit.
                hot_default = int(hot_default) + 1
                log.warning(
                    _("Cinder unit value should be in multiples"
                      " of GBs. So corrected %(input_value)s "
                      "to %(hot_default)s GB.") % {
                        'input_value': input_value,
                        'hot_default': hot_default
                    })
        if input.type == 'version' and hot_default is not None:
            hot_default = TOSCAVersionProperty(hot_default).get_version()
        hot_constraints = []
        if input.constraints:
            for constraint in input.constraints:
                if hot_default:
                    constraint.validate(hot_default)
                hc, hvalue = self._translate_constraints(
                    constraint.constraint_key, constraint.constraint_value)
                hot_constraints.append({hc: hvalue})
        hot_inputs.append(
            HotParameter(name=input.name,
                         type=hot_input_type,
                         description=input.description,
                         default=hot_default,
                         constraints=hot_constraints))
    return hot_inputs
def validate_datatype(
    type, value, entry_schema=None, custom_def=None, prop_name=None, self=None
):
    """Validate value with given type.

    If type is list or map, validate its entry by entry_schema(if defined)
    If type is a user-defined complex datatype, custom_def is required.
    """
    from toscaparser.functions import is_function

    # Function expressions (get_input, get_attribute, ...) are resolved
    # later in processing, so pass them through untouched; ANY accepts
    # everything.
    if is_function(value):
        return value
    if type == Schema.ANY:
        return value

    simple_validators = {
        Schema.STRING: validateutils.validate_string,
        Schema.INTEGER: validateutils.validate_integer,
        Schema.FLOAT: validateutils.validate_float,
        Schema.NUMBER: validateutils.validate_numeric,
        Schema.BOOLEAN: validateutils.validate_boolean,
        Schema.RANGE: validateutils.validate_range,
    }
    if type in simple_validators:
        return simple_validators[type](value)

    if type == Schema.TIMESTAMP:
        validateutils.validate_timestamp(value)
        return value

    if type in (Schema.LIST, Schema.MAP):
        container_check = (validateutils.validate_list
                           if type == Schema.LIST
                           else validateutils.validate_map)
        container_check(value)
        if entry_schema:
            DataEntity.validate_entry(value, entry_schema, custom_def)
        return value

    scalar_unit_classes = {
        Schema.SCALAR_UNIT_SIZE: ScalarUnit_Size,
        Schema.SCALAR_UNIT_FREQUENCY: ScalarUnit_Frequency,
        Schema.SCALAR_UNIT_TIME: ScalarUnit_Time,
    }
    if type in scalar_unit_classes:
        return scalar_unit_classes[type](value).validate_scalar_unit()

    if type == Schema.VERSION:
        return validateutils.TOSCAVersionProperty(value).get_version()

    if type == Schema.PORTSPEC:
        ps = PortSpec(value)
        ps.validate()
        return ps

    if type == Schema.PORTDEF:
        return validateutils.validate_portdef(value, prop_name)

    if not self:
        # User-defined complex datatype: validate via DataEntity.
        return DataEntity(type, value, custom_def).validate()

    # avoid infinite recursion
    return value
def test_get_num_from_scalar_unit_size_negative(self):
    """An unknown size unit must raise ValueError with the full message.

    The previous try/except form silently PASSED when no exception was
    raised at all; assertRaises fails the test in that case.  The
    testtools-style call form returns the raised exception for further
    assertions.
    """
    error = self.assertRaises(
        ValueError,
        ScalarUnit_Size(self.InputMemSize).get_num_from_scalar_unit,
        self.UserInputUnit)
    self.assertEqual(
        _('The unit "qB" is not valid. Valid units are '
          '"[\'B\', \'GB\', \'GiB\', \'KiB\', \'MB\', '
          '\'MiB\', \'TB\', \'TiB\', \'kB\']".'),
        error.__str__())
def validate_datatype(type, value, entry_schema=None, custom_def=None,
                      prop_name=None):
    '''Validate value with given type.

    If type is list or map, validate its entry by entry_schema(if defined)
    If type is a user-defined complex datatype, custom_def is required.
    '''
    from toscaparser.functions import is_function

    # Function expressions (get_input, get_attribute, ...) are resolved
    # later in processing, so pass them through untouched.
    if is_function(value):
        return value

    simple_validators = {
        Schema.STRING: validateutils.validate_string,
        Schema.INTEGER: validateutils.validate_integer,
        Schema.FLOAT: validateutils.validate_float,
        Schema.NUMBER: validateutils.validate_numeric,
        Schema.BOOLEAN: validateutils.validate_boolean,
        Schema.RANGE: validateutils.validate_range,
    }
    if type in simple_validators:
        return simple_validators[type](value)

    if type == Schema.TIMESTAMP:
        validateutils.validate_timestamp(value)
        return value

    if type in (Schema.LIST, Schema.MAP):
        container_check = (validateutils.validate_list
                           if type == Schema.LIST
                           else validateutils.validate_map)
        container_check(value)
        if entry_schema:
            DataEntity.validate_entry(value, entry_schema, custom_def)
        return value

    scalar_unit_classes = {
        Schema.SCALAR_UNIT_SIZE: ScalarUnit_Size,
        Schema.SCALAR_UNIT_FREQUENCY: ScalarUnit_Frequency,
        Schema.SCALAR_UNIT_TIME: ScalarUnit_Time,
    }
    if type in scalar_unit_classes:
        return scalar_unit_classes[type](value).validate_scalar_unit()

    if type == Schema.VERSION:
        return validateutils.TOSCAVersionProperty(value).get_version()

    if type == Schema.PORTSPEC:
        # TODO(TBD) bug 1567063, validate source & target as PortDef type
        # as complex types not just as integers
        PortSpec.validate_additional_req(value, prop_name, custom_def)
        # Mirrors the original's implicit fall-through (returns None).
        return None

    # Anything else is treated as a user-defined complex datatype.
    log.debug("Validate data {}: {}, def={}".format(
        type, value, custom_def))
    return DataEntity(type, value, custom_def).validate()
def get_vm_flavor(specs):
    """Derive VM flavor settings from a TOSCA capability spec dict.

    Memory is normalised to MB and storage to GB.  Absent keys fall
    back to 1 vCPU / 512 MB RAM / 4 GB storage.
    """
    flavor = {'vcpu-count': specs.get('num_cpus', 1)}

    if 'mem_size' in specs:
        flavor['memory-mb'] = (ScalarUnit_Size(
            specs['mem_size']).get_num_from_scalar_unit('MB'))
    else:
        flavor['memory-mb'] = 512

    if 'disk_size' in specs:
        flavor['storage-gb'] = (ScalarUnit_Size(
            specs['disk_size']).get_num_from_scalar_unit('GB'))
    else:
        flavor['storage-gb'] = 4

    return flavor
def attributes_from_host(ctx):
    """Return (num_cpus, mem_size) from the resource's "host" capability.

    mem_size is converted to MiB.  Raises ValueError when the host
    capability is missing or lacks num_cpus/mem_size attributes.
    """
    host = next(
        (cap for cap in ctx.currentResource.capabilities
         if cap.name == "host"),
        None)
    if not host:
        raise ValueError("Can't choose machine type - host info not provided")

    attrs = host.attributes
    if "num_cpus" not in attrs or "mem_size" not in attrs:
        raise ValueError(
            "Can't choose machine type - num_cpus and mem_size must be provided"
        )

    mem_mib = ScalarUnit_Size(attrs["mem_size"]).get_num_from_scalar_unit(
        "MiB")
    return attrs["num_cpus"], mem_mib
def validate_datatype(type, value, entry_schema=None, custom_def=None):
    '''Validate value with given type.

    If type is list or map, validate its entry by entry_schema(if defined)
    If type is a user-defined complex datatype, custom_def is required.
    '''
    simple_validators = {
        Schema.STRING: validateutils.validate_string,
        Schema.INTEGER: validateutils.validate_integer,
        Schema.FLOAT: validateutils.validate_float,
        Schema.NUMBER: validateutils.validate_number,
        Schema.BOOLEAN: validateutils.validate_boolean,
    }
    if type in simple_validators:
        return simple_validators[type](value)

    if type == Schema.TIMESTAMP:
        validateutils.validate_timestamp(value)
        return value

    if type in (Schema.LIST, Schema.MAP):
        container_check = (validateutils.validate_list
                           if type == Schema.LIST
                           else validateutils.validate_map)
        container_check(value)
        if entry_schema:
            DataEntity.validate_entry(value, entry_schema, custom_def)
        return value

    scalar_unit_classes = {
        Schema.SCALAR_UNIT_SIZE: ScalarUnit_Size,
        Schema.SCALAR_UNIT_FREQUENCY: ScalarUnit_Frequency,
        Schema.SCALAR_UNIT_TIME: ScalarUnit_Time,
    }
    if type in scalar_unit_classes:
        return scalar_unit_classes[type](value).validate_scalar_unit()

    if type == Schema.VERSION:
        return validateutils.TOSCAVersionProperty(value).get_version()

    # Anything else is treated as a user-defined complex datatype.
    return DataEntity(type, value, custom_def).validate()
def _translate_inputs(self):
    """Convert TOSCA template inputs into MANO input parameters.

    Injects an implicit ``key_name`` input when supplied by the caller,
    resolves each input's default, normalises ``scalar-unit.size``
    values to whole GiB (rounded up) and ``version`` values to
    canonical form, translates constraints, and returns a list of
    ManoParameter objects.

    Raises:
        Exception: when deploying and an input has no usable value, or
            when a size input resolves to zero.
    """
    mano_inputs = []
    # Membership must be checked by input *name*: self.inputs holds
    # Input objects, so testing the raw string against the list would
    # never match and key_name could be appended twice.
    declared_names = [existing.name for existing in self.inputs]
    if 'key_name' in self.parsed_params and 'key_name' not in declared_names:
        name = 'key_name'
        schema_dict = {'type': 'string',
                       'default': self.parsed_params[name]}
        self.inputs.append(Input(name, schema_dict))
    self.log.info(_('Translating TOSCA input type to MANO input type.'))
    for input in self.inputs:
        mano_default = None
        mano_input_type = TOSCA_TO_MANO_INPUT_TYPES[input.type]
        if input.name in self.parsed_params:
            mano_default = DataEntity.validate_datatype(
                input.type, self.parsed_params[input.name])
        elif input.default is not None:
            mano_default = DataEntity.validate_datatype(
                input.type, input.default)
        elif self.deploy:
            # Deployment requires a concrete value for every input.
            msg = _("Need to specify a value "
                    "for input {0}.").format(input.name)
            self.log.error(msg)
            raise Exception(msg)
        # Guard on None: with no parsed param, no default and deploy
        # disabled, mano_default stays None and must not be fed to the
        # unit/version converters below.
        if input.type == "scalar-unit.size" and mano_default is not None:
            # Assumption here is to use this scalar-unit.size for size of
            # cinder volume in heat templates and will be in GB.
            # should add logic to support other types if needed.
            input_value = mano_default
            mano_default = (ScalarUnit_Size(
                mano_default).get_num_from_scalar_unit('GiB'))
            if mano_default == 0:
                msg = _('Unit value should be > 0.')
                self.log.error(msg)
                raise Exception(msg)
            elif int(mano_default) < mano_default:
                # Round a fractional GiB value up to the next whole unit.
                mano_default = int(mano_default) + 1
                self.log.warning(
                    _("Cinder unit value should be in"
                      " multiples of GBs. So corrected"
                      " %(input_value)s to %(mano_default)s"
                      " GB.") % {
                        'input_value': input_value,
                        'mano_default': mano_default
                    })
        if input.type == 'version' and mano_default is not None:
            mano_default = TOSCAVersionProperty(mano_default).get_version()
        mano_constraints = []
        if input.constraints:
            for constraint in input.constraints:
                if mano_default:
                    constraint.validate(mano_default)
                hc, hvalue = self._translate_constraints(
                    constraint.constraint_key, constraint.constraint_value)
                mano_constraints.append({hc: hvalue})
        mano_inputs.append(
            ManoParameter(self.log,
                          name=input.name,
                          type=mano_input_type,
                          description=input.description,
                          default=mano_default,
                          constraints=mano_constraints))
    return mano_inputs
def test_scenario_get_num_from_scalar_unit_size(self):
    """Scenario check: a scalar size converts to the expected number."""
    converted = (ScalarUnit_Size(self.InputMemSize)
                 .get_num_from_scalar_unit(self.UserInputUnit))
    self.assertEqual(converted, self.expected)