class NodeTemplates(Element):
    """The node_templates section: parses each child template and reports
    the plugins that must be installed for the deployment."""
    required = True
    schema = Dict(type=NodeTemplate)
    requires = {
        _plugins.Plugins: [Value('plugins')],
        _node_types.NodeTypes: ['host_types']
    }
    provides = ['node_template_names', 'deployment_plugins_to_install']

    def parse(self, host_types, plugins):
        # Map each child template's name to its parsed value, then let the
        # shared helper attach plugin information to the nodes in place.
        parsed = {child.name: child.value for child in self.children()}
        _process_nodes_plugins(processed_nodes=parsed,
                               host_types=host_types,
                               plugins=plugins)
        return parsed.values()

    def calculate_provided(self, **kwargs):
        return {
            'node_template_names': {child.name
                                    for child in self.children()},
            'deployment_plugins_to_install': self._deployment_plugins()
        }

    def _deployment_plugins(self):
        # Deduplicate deployment plugins by name across all nodes; when two
        # nodes list the same plugin name, the later one wins.
        plugins_by_name = {}
        for node in self.value:
            for plugin in node[constants.DEPLOYMENT_PLUGINS_TO_INSTALL]:
                plugins_by_name[plugin[constants.PLUGIN_NAME_KEY]] = plugin
        return plugins_by_name.values()
class ResourceTags(DictElement):
    """The resource_tags section; only valid from DSL version 1.4 on."""
    schema = Dict(type=ResourceTag)
    requires = {
        element_version.ToscaDefinitionsVersion: ['version'],
        'inputs': ['validate_version']
    }

    def validate(self, version, validate_version):
        # Skip the check entirely when version validation is disabled.
        if not validate_version:
            return
        self.validate_version(version, (1, 4))
class NodeTypes(types.Types):
    """The node_types section; exposes which types derive from the host
    type via the 'host_types' provided value."""
    schema = Dict(type=NodeType)
    provides = ['host_types']

    def calculate_provided(self):
        return {'host_types': self._types_derived_from(constants.HOST_TYPE)}

    def _types_derived_from(self, derived_from):
        # A type qualifies when the requested ancestor appears anywhere in
        # its precomputed type hierarchy.
        return {name for name, node_type in self.value.items()
                if derived_from in node_type[constants.TYPE_HIERARCHY]}
class DataTypes(types.Types):
    """The data_types section; only valid from DSL version 1.2 on."""
    schema = Dict(type=DataType)
    requires = {
        _version.ToscaDefinitionsVersion: ['version'],
        'inputs': ['validate_version']
    }

    def validate(self, version, validate_version):
        # Skip the check entirely when version validation is disabled.
        if not validate_version:
            return
        self.validate_version(version, (1, 2))
class Workflows(DictElement):
    """The workflows section; reports which plugins the workflows need.

    Provides 'workflow_plugins_to_install': the plugin descriptors (looked
    up in the blueprint's plugins mapping) for every plugin referenced by
    at least one workflow, each listed once, in first-use order.
    """
    schema = Dict(type=Workflow)
    requires = {_plugins.Plugins: [Value('plugins')]}
    provides = ['workflow_plugins_to_install']

    def calculate_provided(self, plugins):
        workflow_plugins = []
        seen_plugin_names = set()
        # Iterate values only: the workflow name itself is never used
        # (the original iterated .items() and ignored the key), and the
        # plugin name is looked up once instead of twice per workflow.
        for op_struct in self.value.values():
            plugin_name = op_struct['plugin']
            if plugin_name not in seen_plugin_names:
                workflow_plugins.append(plugins[plugin_name])
                seen_plugin_names.add(plugin_name)
        return {'workflow_plugins_to_install': workflow_plugins}
class NodeTypes(types.Types):
    """The node_types section (namespace-aware variant); exposes which
    types derive from the host type via 'host_types'."""
    schema = Dict(type=NodeType)
    provides = ['host_types']

    def calculate_provided(self):
        return {'host_types': self._types_derived_from_host_type()}

    def _types_derived_from_host_type(self):
        """
        Finding the types which derived from host type, while disregarding
        their namespace because host type is a base which will not change.
        """
        # Substring match against every hierarchy entry so a namespaced
        # ancestor (e.g. 'ns--<host type>') is still recognized.
        return {
            name
            for name, node_type in self.value.items()
            if any(constants.HOST_TYPE in ancestor
                   for ancestor in node_type[constants.TYPE_HIERARCHY])
        }
class NodeTemplates(Element):
    """The node_templates section: parses each child template and reports
    both deployment and host-agent plugins to install."""
    schema = Dict(type=NodeTemplate)
    requires = {
        _plugins.Plugins: [Value('plugins')],
        _node_types.NodeTypes: ['host_types']
    }
    provides = [
        'node_template_names',
        'plugins_to_install'
    ]

    def parse(self, host_types, plugins):
        # Map each child template's name to its parsed value, then let the
        # shared helper attach plugin information to the nodes in place.
        parsed = {child.name: child.value for child in self.children()}
        _process_nodes_plugins(
            processed_nodes=parsed,
            host_types=host_types,
            plugins=plugins)
        return parsed.values()

    def calculate_provided(self, **kwargs):
        deployment_plugins = self._fetch_node_plugins(
            constants.DEPLOYMENT_PLUGINS_TO_INSTALL)
        host_agent_plugins = self._fetch_node_plugins(
            constants.PLUGINS_TO_INSTALL)
        return {
            'node_template_names': {child.name
                                    for child in self.children()},
            'plugins_to_install': {
                constants.DEPLOYMENT_PLUGINS_TO_INSTALL: deployment_plugins,
                constants.HOST_AGENT_PLUGINS_TO_INSTALL: host_agent_plugins
            }
        }

    def _fetch_node_plugins(self, plugin_kind):
        # Deduplicate plugins of the given kind by name across all nodes;
        # nodes without that plugin list contribute nothing.
        plugins_by_name = {}
        for node in self.value:
            for plugin in node.get(plugin_kind, []):
                plugins_by_name[plugin[constants.PLUGIN_NAME_KEY]] = plugin
        return plugins_by_name.values()
class NodeTemplateInterface(Interface):
    """An interface on a node template: maps operation names to
    NodeTemplateOperation elements."""
    schema = Dict(type=NodeTemplateOperation)
class Schema(DictElement):
    """A properties schema: maps property names to SchemaProperty
    elements."""
    schema = Dict(type=SchemaProperty)
class Capabilities(DictElement):
    """The capabilities section: maps capability names to Capability
    elements."""
    schema = Dict(type=Capability)
class Outputs(DictElement):
    """The outputs section: maps output names to Output elements."""
    schema = Dict(type=Output)
class NodeTypeInterface(Interface):
    """An interface on a node type: maps operation names to
    NodeTypeOperation elements."""
    schema = Dict(type=NodeTypeOperation)
class BlueprintLabels(DictElement):
    """The blueprint_labels section: maps label keys to BlueprintLabel
    elements."""
    schema = Dict(type=BlueprintLabel)
class Groups(DictElement):
    """The groups section: maps group names to Group elements."""
    schema = Dict(type=Group)
class PolicyTypes(DictElement):
    """The policy_types section: maps type names to PolicyType elements."""
    schema = Dict(type=PolicyType)
class Inputs(Schema):
    """The inputs section: a Schema whose entries are
    InputSchemaProperty elements."""
    schema = Dict(type=InputSchemaProperty)
class GroupPolicyTriggers(DictElement):
    """Triggers of a group policy: maps trigger names to
    GroupPolicyTrigger elements."""
    schema = Dict(type=GroupPolicyTrigger)
class NodeTypeInterfaces(DictElement):
    """Interfaces of a node type: maps interface names to
    NodeTypeInterface elements."""
    schema = Dict(type=NodeTypeInterface)
class NodeTypeInterfaces(DictElement):
    """Interfaces of a node type: maps interface names to
    NodeTypeInterface elements."""
    schema = Dict(type=NodeTypeInterface)
    # NOTE(review): child element names are presumably kept un-namespaced
    # here (interface names are user-facing keys) — confirm against the
    # namespacing machinery that reads this flag.
    add_namespace_to_schema_elements = False
class NodeTypeInterface(Interface):
    """An interface on a node type: maps operation names to
    NodeTypeOperation elements."""
    schema = Dict(type=NodeTypeOperation)
    # NOTE(review): operation names are presumably kept un-namespaced —
    # confirm against the namespacing machinery that reads this flag.
    add_namespace_to_schema_elements = False
class Plugins(DictElement):
    """The plugins section: maps plugin names to Plugin elements."""
    schema = Dict(type=Plugin)
class GroupPolicies(DictElement):
    """Policies of a group: maps policy names to GroupPolicy elements.
    This section is mandatory (required = True)."""
    required = True
    schema = Dict(type=GroupPolicy)
class Relationships(types.Types):
    """The relationships section: maps relationship type names to
    Relationship elements."""
    schema = Dict(type=Relationship)
class WorkflowParameters(data_types.Schema):
    """Parameters of a workflow: a Schema of ParameterSchemaProperty
    elements."""
    # NOTE(review): parameter names are presumably kept un-namespaced —
    # confirm against the namespacing machinery that reads this flag.
    add_namespace_to_schema_elements = False
    schema = Dict(type=ParameterSchemaProperty)
class GroupPolicies(DictElement):
    """Policies of a group: maps policy names to GroupPolicy elements."""
    schema = Dict(type=GroupPolicy)
class NamespacesMapping(DictElement):
    """
    An internal DSL element for mapping all the used blueprints import
    namespaces.
    """
    schema = Dict(type=NamespaceMapping)
class Policies(DictElement):
    """The policies section (DSL >= 1.3).

    Parses scaling policies, derives 'scaling_groups' from their target
    groups, and validates group membership against the node containment
    graph (built from contained-in relationships).

    NOTE(review): uses the networkx 1.x API style — `topological_sort`
    with `nbunch=` returning an indexable list and `successors()` used as
    a list. Confirm the pinned networkx version before touching this.
    """
    schema = Dict(type=Policy)
    requires = {
        Groups: [Value('groups')],
        _node_templates.NodeTemplates: [Value('node_templates')],
        _version.ToscaDefinitionsVersion: ['version'],
        'inputs': ['validate_version']
    }
    provides = ['scaling_groups']

    def validate(self, version, validate_version, **kwargs):
        # Policies are only allowed from DSL version 1.3 on.
        if validate_version:
            self.validate_version(version, (1, 3))

    def calculate_provided(self, groups, node_templates, **kwargs):
        scaling_groups = self._create_scaling_groups(groups)
        # we can't perform the validation in "validate" because we need
        # the parsed value of "policies" which is only calculated in "parse"
        self._validate_and_update_groups(scaling_groups, node_templates)
        return {
            'scaling_groups': scaling_groups
        }

    def _create_scaling_groups(self, groups):
        # For every scaling policy, turn each of its target groups into a
        # scaling group carrying the group's members and the policy's
        # properties. A later policy targeting the same group overwrites
        # the earlier entry.
        policies = self.value
        scaling_policies = [policy for policy in policies.values()
                            if policy['type'] == constants.SCALING_POLICY]
        scaling_groups = {}
        for policy in scaling_policies:
            properties = policy['properties']
            for target in policy['targets']:
                group = groups[target]
                scaling_groups[target] = {
                    'members': group['members'],
                    'properties': properties
                }
        return scaling_groups

    def _validate_and_update_groups(self, scaling_groups, node_templates):
        # member_graph: edge member -> group for every group membership.
        member_graph = nx.DiGraph()
        for group_name, group in scaling_groups.items():
            for member in group['members']:
                member_graph.add_edge(member, group_name)
        # node_graph: edge node -> container for every contained-in
        # relationship between node templates.
        node_graph = nx.DiGraph()
        for node in node_templates:
            node_graph.add_node(node['id'])
            for rel in node.get(constants.RELATIONSHIPS, []):
                if constants.CONTAINED_IN_REL_TYPE in rel['type_hierarchy']:
                    node_graph.add_edge(node['id'], rel['target_id'])
        self._validate_no_group_cycles(member_graph)
        self._validate_members_in_one_group_only(member_graph)
        self._validate_no_contained_in_shares_group_with_non_contained_in(
            member_graph, node_graph)
        self._remove_contained_nodes_from_scaling_groups(
            scaling_groups, member_graph, node_graph)

    @staticmethod
    def _validate_no_group_cycles(member_graph):
        # verify no group cycles (i.e. group A in group B and vice versa)
        group_cycles = nx.recursive_simple_cycles(member_graph)
        if group_cycles:
            raise exceptions.DSLParsingLogicException(
                exceptions.ERROR_GROUP_CYCLE,
                'Illegal group cycles found: {0}'.format(group_cycles))

    @staticmethod
    def _validate_members_in_one_group_only(member_graph):
        # verify all group members are part of exactly one group
        for member in member_graph:
            successors = member_graph.successors(member)
            if len(successors) > 1:
                raise exceptions.DSLParsingLogicException(
                    exceptions.ERROR_MULTIPLE_GROUPS,
                    "Nodes and groups cannot be members in multiple groups, "
                    "but member '{0}' belongs to the following multiple "
                    "groups: {1}".format(member, successors))

    @staticmethod
    def _validate_no_contained_in_shares_group_with_non_contained_in(
            member_graph, node_graph):
        # for each node a, if node a is (recursively) contained in node b
        # verify that it is not contained in (recursively) a group that has
        # nodes that are not (recursively) contained in node b too unless
        # node b is in that group as well
        # first extract all group members (recursively)
        group_members = {}
        for member in member_graph:
            if member in node_graph:
                continue
            group_members[member] = nx.ancestors(member_graph, member)
        # next, remove members that are groups themselves
        group_names = set(group_members.keys())
        group_node_members = {}
        for group_name, members in group_members.items():
            group_node_members[group_name] = members - group_names
        # now, for each group, for each node pair, verify both nodes in pair
        # are contained (recursively) in some third node or one of them is
        # contained in some third node that has this property
        containing_nodes = {}

        def check_pair(pair_key):
            # True when the pair is "compatible": same node, no containers
            # at all, one contains the other, or they share a container.
            node_a, node_b = pair_key
            if node_a == node_b:
                return True
            # lazily cache each node's containment chain (the node itself
            # plus everything reachable through contained-in edges)
            if node_a not in containing_nodes:
                containing_nodes[node_a] = nx.topological_sort(
                    node_graph, nbunch=[node_a])
            if node_b not in containing_nodes:
                containing_nodes[node_b] = nx.topological_sort(
                    node_graph, nbunch=[node_b])
            a_containing_nodes = set(containing_nodes[node_a])
            a_containing_nodes.remove(node_a)
            b_containing_nodes = set(containing_nodes[node_b])
            b_containing_nodes.remove(node_b)
            if not (a_containing_nodes or b_containing_nodes):
                return True
            if node_b in a_containing_nodes:
                return True
            if node_a in b_containing_nodes:
                return True
            if a_containing_nodes & b_containing_nodes:
                return True
            return False

        checked_pairs = set()
        ok_pairs = set()
        problematic_pairs = set()
        for node_members in group_node_members.values():
            for node_a, node_b in itertools.product(node_members, repeat=2):
                # normalize pair order so (a, b) and (b, a) are one key
                pair_key = tuple(sorted([node_a, node_b]))
                if pair_key in checked_pairs:
                    continue
                if check_pair(pair_key):
                    ok_pairs.add(pair_key)
                else:
                    problematic_pairs.add(pair_key)
                checked_pairs.add(pair_key)

        def check_problematic_pair(node_a, node_b):
            # A problematic pair is forgiven when an ancestor of either
            # node already forms an ok pair with the other node.
            for node_a_containing_node in containing_nodes[node_a]:
                pair_key = tuple(sorted([node_b, node_a_containing_node]))
                if pair_key in ok_pairs:
                    return True
            for node_b_containing_node in containing_nodes[node_b]:
                pair_key = tuple(sorted([node_a, node_b_containing_node]))
                if pair_key in ok_pairs:
                    return True
            return False

        for node_a, node_b in problematic_pairs:
            if check_problematic_pair(node_a, node_b):
                ok_pairs.add((node_a, node_b))
            else:
                raise exceptions.DSLParsingLogicException(
                    exceptions.ERROR_NON_CONTAINED_GROUP_MEMBERS,
                    "Node '{0}' and '{1}' belong to some shared group but "
                    "they are not contained in any shared node, nor is any "
                    "ancestor node of theirs.".format(node_a, node_b))

    @staticmethod
    def _remove_contained_nodes_from_scaling_groups(
            scaling_groups, member_graph, node_graph):
        # for each node, if a node shares a group (recursively) with
        # a node that contains it (recursively), remove the offending
        # member from the relevant group.
        # if the node and its containee are in the same group, remove the
        # containee, otherwise, remove the group closest to the containing
        # node
        for member in member_graph:
            if member not in node_graph:
                continue
            containing_groups = nx.topological_sort(member_graph,
                                                    nbunch=[member])
            containing_nodes = nx.topological_sort(node_graph,
                                                   nbunch=[member])
            for node in containing_nodes:
                if node == member:
                    continue
                if node not in member_graph:
                    continue
                containing_node_groups = nx.topological_sort(member_graph,
                                                             nbunch=[node])
                containing_node_groups_set = set(containing_node_groups)
                shared_groups = (set(containing_groups) &
                                 containing_node_groups_set)
                if not shared_groups:
                    continue
                # innermost group shared by both member and container
                minimal_containing_group = nx.topological_sort(
                    member_graph, nbunch=shared_groups)[0]
                direct_member_group = member_graph.successors(member)[0]
                members = scaling_groups[minimal_containing_group]['members']
                if direct_member_group == minimal_containing_group:
                    removed_member = member
                else:
                    # remove the outermost containing group of the member
                    # that is NOT also a containing group of the container
                    for containing_group in reversed(containing_groups):
                        if containing_group not in containing_node_groups_set:
                            removed_member = containing_group
                            break
                    else:
                        raise RuntimeError('Illegal state')
                if removed_member in members:
                    members.remove(removed_member)
class Labels(DictElement):
    """The labels section: maps label keys to DeploymentLabel elements."""
    schema = Dict(type=DeploymentLabel)
class PolicyTriggers(DictElement):
    """The policy_triggers section: maps trigger names to PolicyTrigger
    elements."""
    schema = Dict(type=PolicyTrigger)
class DeploymentSchedules(DictElement):
    """The deployment_schedules section: maps schedule names to
    DeploymentSchedule elements."""
    schema = Dict(type=DeploymentSchedule)