class InputSchemaPropertyDefault(SchemaPropertyDefault):
    """Default value of a blueprint input, parsed against the input's type."""

    requires = {
        SchemaInputType: [
            Requirement('component_types',
                        required=False,
                        predicate=sibling_predicate)
        ]
    }

    def parse(self, component_types):
        """Parse the default, resolving any data-type properties it uses."""
        default = self.initial_value
        if default is None:
            # No default declared for this input.
            return None
        return utils.parse_value(
            value=default,
            type_name=self.sibling(SchemaInputType).value,
            data_types=component_types or {},
            undefined_property_error_message=(
                "Undefined property {1} in default value of input {0}."),
            missing_property_error_message=(
                "Value of input {0} is missing property {1}."),
            node_name=self.ancestor(InputSchemaProperty).name,
            path=[],
            raise_on_missing_property=True)
class SchemaPropertyDefault(Element):
    """Default value of a schema property, parsed against the property's
    declared type."""

    schema = Leaf(type=elements.PRIMITIVE_TYPES)
    requires = {
        SchemaPropertyType: [
            Requirement('component_types',
                        required=False,
                        predicate=sibling_predicate)
        ]
    }

    def parse(self, component_types):
        """Parse the default; data-type properties with no explicit default
        start from an empty dict so nested defaults can be filled in."""
        type_name = self.sibling(SchemaPropertyType).value
        default = self.initial_value
        if default is None:
            if (type_name is not None
                    and type_name not in constants.USER_PRIMITIVE_TYPES):
                # Non-primitive (data) type: synthesize an empty default.
                default = {}
            else:
                return None
        return utils.parse_value(
            value=default,
            type_name=type_name,
            data_types=component_types or {},
            undefined_property_error_message=(
                'Undefined property {1} in default value of type {0}'),
            missing_property_error_message='illegal state',
            node_name=self.ancestor(Schema).parent().name,
            path=[self.ancestor(SchemaProperty).name],
            raise_on_missing_property=False)
def _calculate_element_graph(self):
    """Build the dependency graph between parsed elements.

    For every element type's ``requires`` spec, add an edge from each
    element to each element it depends on (subject to the requirement's
    predicates).  The graph is then reversed in place so a topological
    sort yields dependencies first.
    """
    self.element_graph = nx.DiGraph(self._element_tree)
    for element_type, _elements in self.element_type_to_elements.items():
        for requirement, requirement_values in element_type.requires.items():
            # Plain strings are shorthand for a named Requirement.
            requirement_values = [
                Requirement(r) if isinstance(r, basestring) else r
                for r in requirement_values]
            if requirement == 'inputs':
                # Inputs are satisfied from the parse context, not from
                # other elements, so they add no graph edges.
                continue
            if requirement == 'self':
                requirement = element_type
            dependencies = self.element_type_to_elements.get(
                requirement, [])
            # The predicate list depends only on the requirement spec, so
            # hoist it out of the (dependency x element) loops instead of
            # rebuilding it on every innermost iteration; use a generator
            # with all() so evaluation short-circuits without building a
            # throwaway list.
            predicates = [r.predicate for r in requirement_values
                          if r.predicate is not None]
            for dependency in dependencies:
                for element in _elements:
                    add_dependency = not predicates or all(
                        predicate(element, dependency)
                        for predicate in predicates)
                    if add_dependency:
                        self.element_graph.add_edge(element, dependency)
    # we reverse the graph because only networkx 1.9.1 has the reverse
    # flag in the topological sort function, it is only used by it
    # so this should be good
    self.element_graph.reverse(copy=False)
class Workflow(Element):
    """A named workflow declaration: either a short-form mapping string or
    a dict with mapping, parameters and is_cascading."""

    required = True
    schema = [
        Leaf(type=str),
        {
            'mapping': WorkflowMapping,
            'parameters': WorkflowParameters,
            'is_cascading': WorkflowIsCascading
        }
    ]
    requires = {
        'inputs': [Requirement('resource_base', required=False)],
        _plugins.Plugins: [Value('plugins')],
        misc.NamespacesMapping: [Value(constants.NAMESPACES_MAPPING)]
    }

    def parse(self, plugins, resource_base, namespaces_mapping):
        """Normalize the declaration and delegate to the operation parser."""
        if isinstance(self.initial_value, str):
            # Short form: the value is just the mapping string.
            content = {'mapping': self.initial_value, 'parameters': {}}
            cascading = False
        else:
            content = self.build_dict_result()
            cascading = self.initial_value.get('is_cascading', False)
        return operation.process_operation(
            plugins=plugins,
            operation_name=self.name,
            operation_content=content,
            error_code=21,
            partial_error_message='',
            resource_bases=resource_base,
            remote_resources_namespaces=namespaces_mapping,
            is_workflows=True,
            is_workflow_cascading=cascading)
class Workflow(Element):
    """A named workflow declaration: either a short-form mapping string or
    a dict with 'mapping' and 'parameters'."""

    required = True
    schema = [
        Leaf(type=str),
        {
            'mapping': WorkflowMapping,
            'parameters': data_types.Schema
        }
    ]
    requires = {
        'inputs': [Requirement('resource_base', required=False)],
        _plugins.Plugins: [Value('plugins')]
    }

    def parse(self, plugins, resource_base):
        """Normalize the declaration and delegate to the operation parser."""
        raw = self.initial_value
        if isinstance(raw, str):
            # Short form: the value is just the mapping string.
            content = {'mapping': raw, 'parameters': {}}
        else:
            content = self.build_dict_result()
        return operation.process_operation(
            plugins=plugins,
            operation_name=self.name,
            operation_content=content,
            error_code=21,
            partial_error_message='',
            resource_base=resource_base,
            is_workflows=True)
class Relationship(types.Type):
    """A relationship type: merges properties and source/target interfaces
    with its super-type, validates its fields and records its hierarchy."""

    schema = {
        'derived_from': types.RelationshipDerivedFrom,
        'properties': _data_types.SchemaWithInitialDefault,
        'source_interfaces': operation.NodeTypeInterfaces,
        'target_interfaces': operation.NodeTypeInterfaces,
    }
    requires = {
        'inputs': [Requirement('resource_base', required=False)],
        _plugins.Plugins: [Value('plugins')],
        'self': [
            # The parsed parent relationship type, when one exists.
            Value('super_type',
                  predicate=types.derived_from_predicate,
                  required=False)
        ],
        _data_types.DataTypes: [Value('data_types')],
        misc.NamespacesMapping: [Value(constants.NAMESPACES_MAPPING)]
    }

    def parse(self, super_type, plugins, resource_base, data_types,
              namespaces_mapping):
        """Return the fully merged and validated relationship type dict."""
        relationship_type = self.build_dict_result()
        # Drop a falsy 'derived_from' entry so it is not kept as None.
        if not relationship_type.get('derived_from'):
            relationship_type.pop('derived_from', None)
        relationship_type_name = self.name
        if super_type:
            # Merge the parent's property schema and both interface sets
            # into this type's own definitions (this type overrides).
            relationship_type[constants.PROPERTIES] = utils.merge_schemas(
                overridden_schema=super_type.get('properties', {}),
                overriding_schema=relationship_type.get('properties', {}),
                data_types=data_types)
            for interfaces in [
                constants.SOURCE_INTERFACES, constants.TARGET_INTERFACES
            ]:
                relationship_type[interfaces] = interfaces_parser. \
                    merge_relationship_type_interfaces(
                        overriding_interfaces=relationship_type[interfaces],
                        overridden_interfaces=super_type[interfaces])
        _validate_relationship_fields(
            rel_obj=relationship_type,
            plugins=plugins,
            rel_name=relationship_type_name,
            resource_base=resource_base,
            remote_resources_namespaces=namespaces_mapping)
        relationship_type['name'] = relationship_type_name
        relationship_type[
            constants.TYPE_HIERARCHY] = self.create_type_hierarchy(super_type)
        self.fix_properties(relationship_type)
        return relationship_type
def _calculate_element_graph(self):
    """Build the dependency graph between parsed elements.

    For every element type's ``requires`` spec, add edges from each
    element to the elements it depends on.  Two fast paths avoid n**2
    edge insertion: requirements with no predicate go through a single
    _BatchDependency node, and sibling-only predicates scan just the
    dependency's siblings.  The graph is reversed in place at the end so
    a topological sort yields dependencies first.
    """
    self.element_graph = nx.DiGraph(self._element_tree)
    for element_type, _elements in self.element_type_to_elements.items():
        requires = element_type.requires
        for requirement, requirement_values in requires.items():
            # Plain strings are shorthand for a named Requirement.
            requirement_values = [
                Requirement(r) if isinstance(r, text_type) else r
                for r in requirement_values
            ]
            if requirement == 'inputs':
                # Inputs are satisfied from the parse context, not from
                # other elements, so they add no graph edges.
                continue
            if requirement == 'self':
                requirement = element_type
            dependencies = self.element_type_to_elements.get(
                requirement, [])
            predicates = [
                r.predicate for r in requirement_values
                if r.predicate is not None
            ]
            if not predicates:
                # No predicate: every element depends on every dependency.
                # Route all edges through one intermediate node so we add
                # O(n + m) edges instead of O(n * m).
                dep = _BatchDependency(element_type, requirement)
                for dependency in dependencies:
                    self.element_graph.add_edge(dep, dependency)
                for element in _elements:
                    self.element_graph.add_edge(element, dep)
                continue
            if predicates == [sibling_predicate]:
                # If we don't do this, our time complexity is n**2 as
                # we compare all 'default' elements to all 'type'
                # elements (for example), when all we care about is if
                # they are siblings.
                for dependency in dependencies:
                    for element in dependency.parent().children():
                        if element in _elements:
                            self.element_graph.add_edge(
                                element, dependency)
                continue
            # General case: evaluate every predicate for each pair.
            for dependency in dependencies:
                for element in _elements:
                    add_dependency = all(
                        predicate(element, dependency)
                        for predicate in predicates)
                    if add_dependency:
                        self.element_graph.add_edge(element, dependency)
    # we reverse the graph because only netorkx 1.9.1 has the reverse
    # flag in the topological sort function, it is only used by it
    # so this should be good
    self.element_graph.reverse(copy=False)
class DataType(types.Type):
    """A user-defined data type: a named property schema that may derive
    from another data type and reference other data types in its
    property declarations."""

    schema = {
        'properties': SchemaWithInitialDefault,
        'description': DataTypeDescription,
        'derived_from': types.DataTypeDerivedFrom,
        'version': DataTypeVersion
    }
    requires = {
        'self': [
            # component_types of every data type this one refers to
            # directly (its parent type and its property types).
            Requirement('component_types',
                        multiple_results=True,
                        required=False,
                        predicate=lambda source, target: target.name in
                        source.direct_component_types),
            Value('super_type',
                  predicate=types.derived_from_predicate,
                  required=False)
        ]
    }
    provides = ['component_types']

    def __init__(self, *args, **kwargs):
        super(DataType, self).__init__(*args, **kwargs)
        # Lazily computed set of type names this data type references.
        self._direct_component_types = None
        # Accumulated mapping of type name -> parsed type definition,
        # exposed via calculate_provided().
        self.component_types = {}

    def validate(self, **kwargs):
        # Primitive type names are reserved and may not be redefined.
        if self.name in constants.USER_PRIMITIVE_TYPES:
            raise exceptions.DSLParsingLogicException(
                exceptions.ERROR_INVALID_TYPE_NAME,
                'Can\'t redefine primitive type {0}'.format(self.name))

    def parse(self, super_type, component_types):
        """Merge referenced component types and the super-type's schema
        into this type's definition and register the result."""
        merged_component_types = {}
        for component in component_types:
            merged_component_types.update(component)
        self.component_types.update(merged_component_types)
        result = self.build_dict_result()
        if constants.PROPERTIES not in result:
            result[constants.PROPERTIES] = {}
        if super_type:
            # This type's own properties override the parent's.
            result[constants.PROPERTIES] = utils.merge_schemas(
                overridden_schema=super_type.get('properties', {}),
                overriding_schema=result.get('properties', {}),
                data_types=merged_component_types)
        self.fix_properties(result)
        self.component_types[self.name] = result
        return result

    def calculate_provided(self, **kwargs):
        # Expose the accumulated type definitions to dependent elements.
        return {'component_types': self.component_types}

    @property
    def direct_component_types(self):
        # Names of the types this one references: the derived_from parent
        # plus every type named in its property declarations. Computed
        # once and cached.
        if self._direct_component_types is None:
            direct_component_types = set()
            parent_type = self.initial_value.get(constants.DERIVED_FROM)
            if parent_type:
                direct_component_types.add(parent_type)
            for desc in self.descendants(SchemaPropertyType):
                direct_component_types.add(desc.initial_value)
            self._direct_component_types = direct_component_types
        return self._direct_component_types
class DataTypes(types.Types):
    """Container element for all user-defined data types."""

    schema = Dict(type=DataType)
    requires = {
        _version.ToscaDefinitionsVersion: ['version'],
        'inputs': ['validate_version']
    }

    def validate(self, version, validate_version):
        # data_types require DSL version 1.2 or later.
        if validate_version:
            self.validate_version(version, (1, 2))


# source: element describing data_type name
# target: data_type
def _has_type(source, target):
    return source.initial_value == target.name


# Wire DataType into the schema-type elements after the class exists:
# any property/input whose type names a data type depends on that
# data type's parsed value and component_types.
SchemaPropertyType.requires[DataType] = [
    Value('data_type', predicate=_has_type, required=False),
    Requirement('component_types', predicate=_has_type, required=False)
]
SchemaInputType.requires[DataType] = [
    Value('data_type', predicate=_has_type, required=False),
    Requirement('component_types', predicate=_has_type, required=False)
]
class NodeTemplate(Element):
    """A node template: merges its node type's interfaces, processes its
    operations and relationships, and resolves its host."""

    schema = {
        'type': NodeTemplateType,
        'instances': NodeTemplateInstances,
        'capabilities': NodeTemplateCapabilities,
        'interfaces': _operation.NodeTemplateInterfaces,
        'relationships': NodeTemplateRelationships,
        'properties': NodeTemplateProperties,
    }
    requires = {
        'inputs': [Requirement('resource_base', required=False)],
        'self': [
            # All other node templates this one is related to.
            Value('related_node_templates',
                  predicate=_node_template_related_nodes_predicate,
                  multiple_results=True)
        ],
        _plugins.Plugins: [Value('plugins')],
        _node_types.NodeType: [Value('node_type',
                                     predicate=_node_template_node_type_predicate)],
        _node_types.NodeTypes: ['host_types']
    }

    def parse(self, node_type, host_types, plugins, resource_base,
              related_node_templates):
        """Return the fully processed node dict for this template."""
        node = self.build_dict_result()
        node.update({
            'name': self.name,
            'id': self.name,
            constants.TYPE_HIERARCHY: node_type[constants.TYPE_HIERARCHY]
        })
        # Template interfaces override the node type's interfaces.
        node[constants.INTERFACES] = interfaces_parser.\
            merge_node_type_and_node_template_interfaces(
                node_type_interfaces=node_type[constants.INTERFACES],
                node_template_interfaces=node[constants.INTERFACES])
        node['operations'] = _process_operations(
            partial_error_message="in node '{0}' of type '{1}'".format(
                node['id'], node['type']),
            interfaces=node[constants.INTERFACES],
            plugins=plugins,
            error_code=10,
            resource_base=resource_base)
        node_name_to_node = dict(
            (node['id'], node) for node in related_node_templates)
        _post_process_node_relationships(processed_node=node,
                                         node_name_to_node=node_name_to_node,
                                         plugins=plugins,
                                         resource_base=resource_base)
        # Resolve host_id: a host-type node is its own host; otherwise
        # inherit the host of the node this one is contained in.
        contained_in = self.child(
            NodeTemplateRelationships).provided['contained_in']
        if self.child(NodeTemplateType).value in host_types:
            node['host_id'] = self.name
        elif contained_in:
            containing_node = [
                n for n in related_node_templates
                if n['name'] == contained_in
            ][0]
            if 'host_id' in containing_node:
                node['host_id'] = containing_node['host_id']
        return node
def _extract_element_requirements(element):
    """Collect the keyword arguments required by *element*'s ``requires``
    spec from the parse context and from other parsed elements.

    :param element: the element whose requirements are being resolved.
    :return: dict mapping requirement name -> resolved value (a list when
        the requirement declares ``multiple_results``).
    :raises exceptions.DSLParsingFormatException: when a required input or
        provided value is missing, or a single-result requirement does not
        resolve to exactly one value.
    """
    context = element.context
    required_args = {}
    for required_type, requirements in element.requires.items():
        # Plain strings are shorthand for a named, required Requirement.
        requirements = [
            Requirement(r) if isinstance(r, text_type) else r
            for r in requirements
        ]
        if not requirements:
            # only set required type as a logical dependency
            pass
        elif required_type == 'inputs':
            for inp in requirements:
                if inp.name not in context.inputs and inp.required:
                    # BUG FIX: the existing inputs were previously never
                    # formatted into the message (the placeholder was
                    # missing), leaving it dangling after the colon.
                    raise exceptions.DSLParsingFormatException(
                        1, "Missing required input '{0}'. "
                           "Existing inputs: {1}".format(
                               inp.name, list(context.inputs)))
                required_args[inp.name] = context.inputs.get(inp.name)
        else:
            if required_type == 'self':
                required_type = type(element)
            if (len(requirements) == 1
                    and requirements[0].predicate == sibling_predicate):
                # Similar to the other siblings predicate check above,
                # doing this saves a massive amount of time on larger
                # blueprints by avoiding n**2 time complexity.
                required_type_elements = [
                    child for child in element.parent().children()
                    if isinstance(child, required_type)
                ]
            else:
                required_type_elements = (
                    context.element_type_to_elements.get(
                        required_type, []))
            for requirement in requirements:
                result = []
                for required_element in required_type_elements:
                    if requirement.predicate and not requirement.predicate(
                            element, required_element):
                        continue
                    if requirement.parsed:
                        result.append(required_element.value)
                        continue
                    if requirement.name not in required_element.provided:
                        if requirement.required:
                            provided = list(required_element.provided)
                            raise exceptions.DSLParsingFormatException(
                                1, "Required value '{0}' is not "
                                   "provided by '{1}'. Provided values "
                                   "are: {2}".format(
                                       requirement.name,
                                       required_element.name,
                                       provided))
                        continue
                    result.append(
                        required_element.provided[requirement.name])
                if len(result) != 1 and not requirement.multiple_results:
                    if requirement.required:
                        raise exceptions.DSLParsingFormatException(
                            1, "Expected exactly one result for "
                               "requirement '{0}' but found {1}".format(
                                   requirement.name,
                                   'none' if not result else result))
                    elif not result:
                        # Optional and unresolved: pass None through.
                        result = [None]
                    else:
                        # More than one match for an optional single-result
                        # requirement should be impossible.
                        raise ValueError('Illegal state')
                if not requirement.multiple_results:
                    result = result[0]
                required_args[requirement.name] = result
    return required_args
def _extract_element_requirements(element):
    """Collect the keyword arguments required by *element*'s ``requires``
    spec from the parse context and from other parsed elements.

    :param element: the element whose requirements are being resolved.
    :return: dict mapping requirement name -> resolved value (a list when
        the requirement declares ``multiple_results``).
    :raises exceptions.DSLParsingFormatException: when a required input or
        provided value is missing, or a single-result requirement does not
        resolve to exactly one value.
    """
    context = element.context
    required_args = {}
    for required_type, requirements in element.requires.items():
        # Plain strings are shorthand for a named, required Requirement.
        requirements = [Requirement(r) if isinstance(r, basestring)
                        else r
                        for r in requirements]
        if not requirements:
            # only set required type as a logical dependency
            pass
        elif required_type == 'inputs':
            for inp in requirements:
                if inp.name not in context.inputs and inp.required:
                    # BUG FIX: context.inputs.keys() was passed to format()
                    # but the format string had no {1} placeholder, so the
                    # existing inputs were silently dropped from the message.
                    raise exceptions.DSLParsingFormatException(
                        1, "Missing required input '{0}'. "
                           "Existing inputs: {1}"
                        .format(inp.name, context.inputs.keys()))
                required_args[inp.name] = context.inputs.get(inp.name)
        else:
            if required_type == 'self':
                required_type = type(element)
            required_type_elements = context.element_type_to_elements.get(
                required_type, [])
            for requirement in requirements:
                result = []
                for required_element in required_type_elements:
                    if requirement.predicate and not requirement.predicate(
                            element, required_element):
                        continue
                    if requirement.parsed:
                        result.append(required_element.value)
                        continue
                    if requirement.name not in required_element.provided:
                        if requirement.required:
                            provided = required_element.provided.keys()
                            raise exceptions.DSLParsingFormatException(
                                1, "Required value '{0}' is not "
                                   "provided by '{1}'. Provided values "
                                   "are: {2}"
                                .format(requirement.name,
                                        required_element.name,
                                        provided))
                        continue
                    result.append(required_element.provided[
                        requirement.name])
                if len(result) != 1 and not requirement.multiple_results:
                    if requirement.required:
                        raise exceptions.DSLParsingFormatException(
                            1, "Expected exactly one result for "
                               "requirement '{0}' but found {1}"
                            .format(requirement.name,
                                    'none' if not result else result))
                    elif not result:
                        # Optional and unresolved: pass None through.
                        result = [None]
                    else:
                        # More than one match for an optional single-result
                        # requirement should be impossible.
                        raise ValueError('Illegal state')
                if not requirement.multiple_results:
                    result = result[0]
                required_args[requirement.name] = result
    return required_args