class NodeTemplateRelationshipProperties(Element):

    schema = Leaf(type=dict)
    requires = {
        NodeTemplateRelationshipType: [],
        _relationships.Relationships: [Value('relationships')],
        _data_types.DataTypes: [Value('data_types')]
    }

    def parse(self, relationships, data_types):
        relationship_type_name = self.sibling(
            NodeTemplateRelationshipType).value
        properties = self.initial_value or {}
        return utils.merge_schema_and_instance_properties(
            instance_properties=properties,
            schema_properties=(
                relationships[relationship_type_name]['properties']),
            data_types=data_types,
            undefined_property_error_message=(
                "'{0}' node relationship '{1}' property is not part of "
                "the derived relationship type properties schema"),
            missing_property_error_message=(
                "'{0}' node relationship does not provide a "
                "value for mandatory "
                "'{1}' property which is "
                'part of its relationship type schema'),
            node_name=self.ancestor(NodeTemplate).name)
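
The helper utils.merge_schema_and_instance_properties is not shown in these examples; the following is a minimal, hypothetical sketch of the semantics the call above relies on (custom data_types resolution omitted): type-schema defaults are applied first, instance values override them, an undefined property triggers the first error message and a missing mandatory property the second.

def merge_props_sketch(instance_properties, schema_properties):
    # Hypothetical stand-in for utils.merge_schema_and_instance_properties.
    undefined = set(instance_properties) - set(schema_properties)
    if undefined:
        raise ValueError(
            'properties not part of the schema: {0}'.format(sorted(undefined)))
    merged = {}
    for name, prop_schema in schema_properties.items():
        if name in instance_properties:
            merged[name] = instance_properties[name]
        elif 'default' in prop_schema:
            merged[name] = prop_schema['default']
        elif prop_schema.get('required', True):
            raise ValueError(
                "no value for mandatory property '{0}'".format(name))
    return merged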
Example 2
class GroupPolicyTriggerParameters(Element):

    schema = Leaf(type=dict)
    requires = {
        GroupPolicyTriggerType: [],
        PolicyTriggers: [Value('policy_triggers')],
        data_types.DataTypes: [Value('data_types')]
    }

    def parse(self, policy_triggers, data_types):
        trigger_type = policy_triggers[
            self.sibling(GroupPolicyTriggerType).value]
        policy_trigger_parameters = trigger_type.get('parameters', {})
        return utils.merge_schema_and_instance_properties(
            self.initial_value or {},
            policy_trigger_parameters,
            data_types,
            "{0} '{1}' property is not part of "
            "the policy type properties schema",
            "{0} does not provide a value for mandatory "
            "'{1}' property which is "
            "part of its policy type schema",
            node_name="group '{0}', policy '{1}' trigger '{2}'"
                      .format(self.ancestor(Group).name,
                              self.ancestor(GroupPolicy).name,
                              self.ancestor(GroupPolicyTrigger).name))
class NodeTemplateProperties(Element):

    schema = Leaf(type=dict)
    requires = {
        NodeTemplateType: [],
        _node_types.NodeTypes: [Value('node_types')],
        _data_types.DataTypes: [Value('data_types')]
    }

    def parse(self, node_types, data_types):
        properties = self.initial_value or {}
        node_type_name = self.sibling(NodeTemplateType).value
        node_type = node_types[node_type_name]
        return utils.merge_schema_and_instance_properties(
            instance_properties=properties,
            schema_properties=node_type['properties'],
            data_types=data_types,
            undefined_property_error_message=(
                "'{0}' node '{1}' property is not part of the derived"
                " type properties schema"),
            missing_property_error_message=("'{0}' node does not provide a "
                                            "value for mandatory "
                                            "'{1}' property which is "
                                            "part of its type schema"),
            node_name=self.ancestor(NodeTemplate).name)
Example 4
class Workflow(Element):

    required = True
    schema = [
        Leaf(type=str),
        {
            'mapping': WorkflowMapping,
            'parameters': WorkflowParameters,
            'is_cascading': WorkflowIsCascading
        }
    ]
    requires = {
        'inputs': [Requirement('resource_base', required=False)],
        _plugins.Plugins: [Value('plugins')],
        misc.NamespacesMapping: [Value(constants.NAMESPACES_MAPPING)]
    }

    def parse(self, plugins, resource_base, namespaces_mapping):
        if isinstance(self.initial_value, str):
            operation_content = {'mapping': self.initial_value,
                                 'parameters': {}}
            is_cascading = False
        else:
            operation_content = self.build_dict_result()
            is_cascading = self.initial_value.get('is_cascading', False)
        return operation.process_operation(
            plugins=plugins,
            operation_name=self.name,
            operation_content=operation_content,
            error_code=21,
            partial_error_message='',
            resource_bases=resource_base,
            remote_resources_namespaces=namespaces_mapping,
            is_workflows=True,
            is_workflow_cascading=is_cascading)
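
For reference, both declaration shapes accepted by the schema above normalize to the same operation_content; the names below are illustrative, not taken from any real blueprint.

# A workflow may be declared as a bare mapping string or as a dict; the
# string form is normalized to a dict with empty parameters, and
# is_cascading defaults to False unless set explicitly.
short_form = 'hypothetical_plugin.workflows.install'
long_form = {
    'mapping': 'hypothetical_plugin.workflows.install',
    'parameters': {'retries': {'default': 3}},
    'is_cascading': False,
}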
Example 5
class NodeTemplateRelationshipProperties(Element):

    schema = Leaf(type=dict)
    requires = {
        NodeTemplateRelationshipType: [],
        NodeTemplateRelationshipTarget: [],
        _relationships.Relationships: [Value('relationships')],
        _data_types.DataTypes: [Value('data_types')]
    }

    def parse(self, relationships, data_types):
        relationship_type_name = self.sibling(
            NodeTemplateRelationshipType).value
        properties = self.initial_value or {}
        return utils.merge_schema_and_instance_properties(
            instance_properties=properties,
            schema_properties=(
                relationships[relationship_type_name]['properties']),
            data_types=data_types,
            undefined_property_error_message=(
                "'{0}' node relationship '{1}' property is not part of "
                "the derived relationship type properties schema"),
            missing_property_error_message=(
                "'{0}' node relationship does not provide a "
                "value for mandatory "
                "'{1}' property which is "
                'part of its relationship type schema'),
            node_name=self.ancestor(NodeTemplate).name)

    def validate(self, relationships, **kwargs):
        relationship_type = self.sibling(NodeTemplateRelationshipType).value
        node_name = self.ancestor(NodeTemplate).name

        if (relationship_type ==
                'cloudify.relationships.depends_on_lifecycle_operation'):
            target_node = self.sibling(NodeTemplateRelationshipTarget).value
            properties = self.initial_value or {}
            node_templates = self.ancestor(NodeTemplates).initial_value_holder

            operation_input = properties.get('operation', None)
            if not operation_input:
                raise exceptions.DSLParsingLogicException(
                    215, 'For "{1}" node, please supply "{0}" with a defined '
                    'lifecycle operation target '.format(
                        relationship_type, node_name))

            _, target_node_interfaces = node_templates[target_node].get_item(
                'interfaces')
            if (not target_node_interfaces or operation_input not in
                    target_node_interfaces['cloudify.interfaces.lifecycle']):
                raise exceptions.DSLParsingLogicException(
                    216,
                    'Please define "{0}" operation in the target node "{1}" '
                    'for "{3}" node\'s "{2}" relationship'.format(
                        operation_input, target_node, relationship_type,
                        node_name))
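
The validation above expects roughly the following data shapes (hypothetical values): the relationship's properties must name an 'operation', and that operation must exist under the target node's 'cloudify.interfaces.lifecycle' interface.

# Hypothetical shapes that pass the checks in validate() above.
relationship_properties = {'operation': 'configure'}
target_node_interfaces = {
    'cloudify.interfaces.lifecycle': {
        'configure': 'scripts/configure.sh',
    }
}
operation_input = relationship_properties.get('operation')
assert operation_input in target_node_interfaces[
    'cloudify.interfaces.lifecycle']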
Example 6
class Relationship(types.Type):

    schema = {
        'derived_from': types.RelationshipDerivedFrom,
        'properties': _data_types.SchemaWithInitialDefault,
        'source_interfaces': operation.NodeTypeInterfaces,
        'target_interfaces': operation.NodeTypeInterfaces,
    }
    requires = {
        'inputs': [Requirement('resource_base', required=False)],
        _plugins.Plugins: [Value('plugins')],
        'self': [
            Value('super_type',
                  predicate=types.derived_from_predicate,
                  required=False)
        ],
        _data_types.DataTypes: [Value('data_types')],
        misc.NamespacesMapping: [Value(constants.NAMESPACES_MAPPING)]
    }

    def parse(self, super_type, plugins, resource_base, data_types,
              namespaces_mapping):
        relationship_type = self.build_dict_result()
        if not relationship_type.get('derived_from'):
            relationship_type.pop('derived_from', None)
        relationship_type_name = self.name

        if super_type:
            relationship_type[constants.PROPERTIES] = utils.merge_schemas(
                overridden_schema=super_type.get('properties', {}),
                overriding_schema=relationship_type.get('properties', {}),
                data_types=data_types)
            for interfaces in [
                    constants.SOURCE_INTERFACES, constants.TARGET_INTERFACES
            ]:
                relationship_type[interfaces] = interfaces_parser. \
                    merge_relationship_type_interfaces(
                        overriding_interfaces=relationship_type[interfaces],
                        overridden_interfaces=super_type[interfaces])

        _validate_relationship_fields(
            rel_obj=relationship_type,
            plugins=plugins,
            rel_name=relationship_type_name,
            resource_base=resource_base,
            remote_resources_namespaces=namespaces_mapping)
        relationship_type['name'] = relationship_type_name
        relationship_type[
            constants.TYPE_HIERARCHY] = self.create_type_hierarchy(super_type)
        self.fix_properties(relationship_type)
        return relationship_type
Example 7
class Workflow(Element):

    required = True
    schema = [
        Leaf(type=str), {
            'mapping': WorkflowMapping,
            'parameters': data_types.Schema
        }
    ]
    requires = {
        'inputs': [Requirement('resource_base', required=False)],
        _plugins.Plugins: [Value('plugins')]
    }

    def parse(self, plugins, resource_base):
        if isinstance(self.initial_value, str):
            operation_content = {
                'mapping': self.initial_value,
                'parameters': {}
            }
        else:
            operation_content = self.build_dict_result()
        return operation.process_operation(plugins=plugins,
                                           operation_name=self.name,
                                           operation_content=operation_content,
                                           error_code=21,
                                           partial_error_message='',
                                           resource_base=resource_base,
                                           is_workflows=True)
class NodeTemplateRelationship(Element):

    schema = {
        'type': NodeTemplateRelationshipType,
        'target': NodeTemplateRelationshipTarget,
        'properties': NodeTemplateRelationshipProperties,
        'source_interfaces': _operation.NodeTemplateInterfaces,
        'target_interfaces': _operation.NodeTemplateInterfaces,
    }
    requires = {
        _relationships.Relationship: [
            Value('relationship_type',
                  predicate=_node_template_relationship_type_predicate)
        ]
    }

    def parse(self, relationship_type):
        result = self.build_dict_result()
        for interfaces in [
                constants.SOURCE_INTERFACES, constants.TARGET_INTERFACES
        ]:
            result[interfaces] = interfaces_parser. \
                merge_relationship_type_and_instance_interfaces(
                    relationship_type_interfaces=relationship_type[interfaces],
                    relationship_instance_interfaces=result[interfaces])

        result[constants.TYPE_HIERARCHY] = relationship_type[
            constants.TYPE_HIERARCHY]

        result['target_id'] = result['target']
        del result['target']

        return result
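
The net effect of NodeTemplateRelationship.parse on a single relationship entry can be sketched with hypothetical values (interface merging omitted):

# 'target' is renamed to 'target_id' and the type hierarchy is copied over
# from the resolved relationship type.
relationship_type = {
    'type_hierarchy': ['cloudify.relationships.depends_on',
                       'cloudify.relationships.contained_in'],
}
result = {'type': 'cloudify.relationships.contained_in',
          'target': 'host_node'}
result['type_hierarchy'] = relationship_type['type_hierarchy']
result['target_id'] = result.pop('target')
# result == {'type': 'cloudify.relationships.contained_in',
#            'type_hierarchy': [...], 'target_id': 'host_node'}
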
class NodeTemplates(Element):

    required = True
    schema = Dict(type=NodeTemplate)
    requires = {
        _plugins.Plugins: [Value('plugins')],
        _node_types.NodeTypes: ['host_types']
    }
    provides = ['node_template_names', 'deployment_plugins_to_install']

    def parse(self, host_types, plugins):
        processed_nodes = dict(
            (node.name, node.value) for node in self.children())
        _process_nodes_plugins(processed_nodes=processed_nodes,
                               host_types=host_types,
                               plugins=plugins)
        return processed_nodes.values()

    def calculate_provided(self, **kwargs):
        return {
            'node_template_names': set(c.name for c in self.children()),
            'deployment_plugins_to_install': self._deployment_plugins()
        }

    def _deployment_plugins(self):
        deployment_plugins = {}
        for node in self.value:
            for deployment_plugin in \
                    node[constants.DEPLOYMENT_PLUGINS_TO_INSTALL]:
                plugin_name = deployment_plugin[constants.PLUGIN_NAME_KEY]
                deployment_plugins[plugin_name] = deployment_plugin
        return deployment_plugins.values()
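
_deployment_plugins simply de-duplicates plugin dicts by name across all nodes; a toy illustration with assumed key names:

# Toy data; 'name' stands in for constants.PLUGIN_NAME_KEY. The last plugin
# dict seen for a given name wins, and each name appears exactly once.
plugins_per_node = [
    [{'name': 'script'}],
    [{'name': 'script'}, {'name': 'openstack'}],
]
deduped = {}
for node_plugins in plugins_per_node:
    for plugin in node_plugins:
        deduped[plugin['name']] = plugin
# list(deduped) == ['script', 'openstack']
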
class NodeTemplateCapabilities(DictElement):

    schema = {'scalable': NodeTemplateCapabilitiesScalable}
    requires = {
        _version.ToscaDefinitionsVersion: ['version'],
        'inputs': ['validate_version'],
        NodeTemplateInstancesDeploy: [
            Value('instances_deploy',
                  required=False,
                  predicate=_instances_predicate)
        ]
    }

    def validate(self, version, validate_version, instances_deploy):
        if validate_version:
            self.validate_version(version, (1, 3))
        if instances_deploy is not None and self.initial_value is not None:
            raise exceptions.DSLParsingLogicException(
                exceptions.ERROR_INSTANCES_DEPLOY_AND_CAPABILITIES,
                "Node '{0}' defines both instances.deploy and "
                "capabilities.scalable (Note: instances.deploy is deprecated)".
                format(self.ancestor(NodeTemplate).name))

    def parse(self, instances_deploy, **kwargs):
        if self.initial_value is None:
            properties = scalable.Properties.DEFAULT.copy()
            if instances_deploy is not None:
                for key in properties:
                    if key not in ['min_instances', 'max_instances']:
                        properties[key] = instances_deploy
            return {'scalable': {'properties': properties}}
        else:
            return {
                'scalable': self.child(NodeTemplateCapabilitiesScalable).value
            }
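
The translation of the deprecated instances.deploy value into capabilities.scalable can be sketched as follows; the default property keys here are assumed for illustration, not taken from scalable.Properties.DEFAULT.

# Assumed defaults (illustrative only). Everything except min_instances and
# max_instances is overridden by the deprecated instances.deploy value.
assumed_defaults = {'default_instances': 1, 'min_instances': 0,
                    'max_instances': -1, 'current_instances': 1,
                    'planned_instances': 1}
instances_deploy = 3
properties = dict(assumed_defaults)
for key in properties:
    if key not in ['min_instances', 'max_instances']:
        properties[key] = instances_deploy
capabilities = {'scalable': {'properties': properties}}
# capabilities['scalable']['properties']['default_instances'] == 3
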
class NodeTemplateType(Element):

    required = True
    schema = Leaf(type=str)
    requires = {_node_types.NodeTypes: [Value('node_types')]}

    def validate(self, node_types):
        if self.initial_value not in node_types:
            err_message = ("Could not locate node type: '{0}'; "
                           "existing types: {1}".format(
                               self.initial_value, node_types.keys()))
            raise exceptions.DSLParsingLogicException(7, err_message)
Example 12
class PolicyInstanceTarget(Element):

    schema = Leaf(type=str)
    requires = {Groups: [Value('groups')]}

    def validate(self, groups):
        if self.initial_value not in groups:
            raise exceptions.DSLParsingLogicException(
                exceptions.ERROR_NON_GROUP_TARGET,
                "'{0}' is not a valid policy target. Only groups are "
                "valid policy targets. Defined groups are {1}.".format(
                    self.initial_value, groups))
class NodeTemplateRelationshipType(Element):

    required = True
    schema = Leaf(type=str)
    requires = {_relationships.Relationships: [Value('relationships')]}

    def validate(self, relationships):
        if self.initial_value not in relationships:
            raise exceptions.DSLParsingLogicException(
                26, "A relationship instance under node '{0}' declares an "
                "undefined relationship type '{1}'".format(
                    self.ancestor(NodeTemplate).name, self.initial_value))
Example 14
class GroupPolicyType(Element):

    required = True
    schema = Leaf(type=str)
    requires = {PolicyTypes: [Value('policy_types')]}

    def validate(self, policy_types):
        if self.initial_value not in policy_types:
            raise exceptions.DSLParsingLogicException(
                41, "Policy '{0}' of group '{1}' references a non existent "
                "policy type '{2}'".format(
                    self.ancestor(GroupPolicy).name,
                    self.ancestor(Group).name, self.initial_value))
Example 15
class BlueprintVersionExtractor(Element):

    schema = {
        'tosca_definitions_version': _version.ToscaDefinitionsVersion,
        # here so it gets version validated
        'dsl_definitions': misc.DSLDefinitions
    }
    requires = {
        _version.ToscaDefinitionsVersion: ['version',
                                           Value('plan_version')]
    }

    def parse(self, version, plan_version):
        return {'version': version, 'plan_version': plan_version}
Example 16
class Workflows(DictElement):

    schema = Dict(type=Workflow)
    requires = {_plugins.Plugins: [Value('plugins')]}
    provides = ['workflow_plugins_to_install']

    def calculate_provided(self, plugins):
        workflow_plugins = []
        workflow_plugin_names = set()
        for workflow, op_struct in self.value.items():
            if op_struct['plugin'] not in workflow_plugin_names:
                plugin_name = op_struct['plugin']
                workflow_plugins.append(plugins[plugin_name])
                workflow_plugin_names.add(plugin_name)
        return {'workflow_plugins_to_install': workflow_plugins}
Example 17
class GroupPolicyTriggerType(Element):

    required = True
    schema = Leaf(type=str)
    requires = {PolicyTriggers: [Value(constants.POLICY_TRIGGERS)]}

    def validate(self, policy_triggers):
        if self.initial_value not in policy_triggers:
            raise exceptions.DSLParsingLogicException(
                42, "Trigger '{0}' of policy '{1}' of group '{2}' "
                "references a non existent "
                "policy trigger '{3}'".format(
                    self.ancestor(GroupPolicyTrigger).name,
                    self.ancestor(GroupPolicy).name,
                    self.ancestor(Group).name, self.initial_value))
Example 18
class NodeTemplates(Element):

    schema = Dict(type=NodeTemplate)
    requires = {
        _plugins.Plugins: [Value('plugins')],
        _node_types.NodeTypes: ['host_types']
    }
    provides = [
        'node_template_names',
        'plugins_to_install'
    ]

    def parse(self, host_types, plugins):
        processed_nodes = dict((node.name, node.value)
                               for node in self.children())
        _process_nodes_plugins(
            processed_nodes=processed_nodes,
            host_types=host_types,
            plugins=plugins)
        return processed_nodes.values()

    def calculate_provided(self, **kwargs):
        return {
            'node_template_names': set(c.name for c in self.children()),
            'plugins_to_install': {
                constants.DEPLOYMENT_PLUGINS_TO_INSTALL:
                    self._fetch_node_plugins(
                        constants.DEPLOYMENT_PLUGINS_TO_INSTALL),
                constants.HOST_AGENT_PLUGINS_TO_INSTALL:
                    self._fetch_node_plugins(constants.PLUGINS_TO_INSTALL)
            }
        }

    def _fetch_node_plugins(self, plugin_kind):
        used_plugins = {}
        for node in self.value:
            plugins = node.get(plugin_kind, [])
            for plugin in plugins:
                plugin_name = plugin[constants.PLUGIN_NAME_KEY]
                used_plugins[plugin_name] = plugin
        return used_plugins.values()
Example 19
class DataType(types.Type):

    schema = {
        'properties': SchemaWithInitialDefault,
        'description': DataTypeDescription,
        'derived_from': types.DataTypeDerivedFrom,
        'version': DataTypeVersion
    }

    requires = {
        'self': [
            Requirement('component_types',
                        multiple_results=True,
                        required=False,
                        predicate=lambda source, target: (
                            target.name in source.direct_component_types)),
            Value('super_type',
                  predicate=types.derived_from_predicate,
                  required=False)
        ]
    }

    provides = ['component_types']

    def __init__(self, *args, **kwargs):
        super(DataType, self).__init__(*args, **kwargs)
        self._direct_component_types = None
        self.component_types = {}

    def validate(self, **kwargs):
        if self.name in constants.USER_PRIMITIVE_TYPES:
            raise exceptions.DSLParsingLogicException(
                exceptions.ERROR_INVALID_TYPE_NAME,
                'Can\'t redefine primitive type {0}'.format(self.name))

    def parse(self, super_type, component_types):
        merged_component_types = {}
        for component in component_types:
            merged_component_types.update(component)
        self.component_types.update(merged_component_types)
        result = self.build_dict_result()
        if constants.PROPERTIES not in result:
            result[constants.PROPERTIES] = {}
        if super_type:
            result[constants.PROPERTIES] = utils.merge_schemas(
                overridden_schema=super_type.get('properties', {}),
                overriding_schema=result.get('properties', {}),
                data_types=merged_component_types)
        self.fix_properties(result)
        self.component_types[self.name] = result
        return result

    def calculate_provided(self, **kwargs):
        return {'component_types': self.component_types}

    @property
    def direct_component_types(self):
        if self._direct_component_types is None:
            direct_component_types = set()
            parent_type = self.initial_value.get(constants.DERIVED_FROM)
            if parent_type:
                direct_component_types.add(parent_type)
            for desc in self.descendants(SchemaPropertyType):
                direct_component_types.add(desc.initial_value)
            self._direct_component_types = direct_component_types
        return self._direct_component_types
Example 20

class DataTypes(types.Types):

    schema = Dict(type=DataType)

    requires = {
        _version.ToscaDefinitionsVersion: ['version'],
        'inputs': ['validate_version']
    }

    def validate(self, version, validate_version):
        if validate_version:
            self.validate_version(version, (1, 2))


# source: element describing data_type name
# target: data_type
def _has_type(source, target):
    return source.initial_value == target.name


SchemaPropertyType.requires[DataType] = [
    Value('data_type', predicate=_has_type, required=False),
    Requirement('component_types', predicate=_has_type, required=False)
]
SchemaInputType.requires[DataType] = [
    Value('data_type', predicate=_has_type, required=False),
    Requirement('component_types', predicate=_has_type, required=False)
]
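
The predicate wiring above only injects a DataType as a requirement of a schema property when the property's declared type matches that data type's name; a minimal sketch with stand-in objects:

# Stand-ins for the parser elements involved in the _has_type predicate:
# 'source' holds a property's declared type name and 'target' is a
# candidate DataType element. Names are hypothetical.
class FakeSource(object):
    initial_value = 'hypothetical.Endpoint'

class FakeTarget(object):
    name = 'hypothetical.Endpoint'

assert _has_type(FakeSource(), FakeTarget())      # types match
FakeTarget.name = 'hypothetical.Other'
assert not _has_type(FakeSource(), FakeTarget())  # no match, not injected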
Example 21
class Policies(DictElement):

    schema = Dict(type=Policy)
    requires = {
        Groups: [Value('groups')],
        _node_templates.NodeTemplates: [Value('node_templates')],
        _version.ToscaDefinitionsVersion: ['version'],
        'inputs': ['validate_version']
    }
    provides = ['scaling_groups']

    def validate(self, version, validate_version, **kwargs):
        if validate_version:
            self.validate_version(version, (1, 3))

    def calculate_provided(self, groups, node_templates, **kwargs):
        scaling_groups = self._create_scaling_groups(groups)
        # we can't perform the validation in "validate" because we need
        # the parsed value of "policies" which is only calculated in "parse"
        self._validate_and_update_groups(scaling_groups, node_templates)
        return {
            'scaling_groups': scaling_groups
        }

    def _create_scaling_groups(self, groups):
        policies = self.value
        scaling_policies = [policy for policy in policies.values()
                            if policy['type'] == constants.SCALING_POLICY]
        scaling_groups = {}
        for policy in scaling_policies:
            properties = policy['properties']
            for target in policy['targets']:
                group = groups[target]
                scaling_groups[target] = {
                    'members': group['members'],
                    'properties': properties
                }
        return scaling_groups

    def _validate_and_update_groups(self, scaling_groups, node_templates):

        member_graph = nx.DiGraph()
        for group_name, group in scaling_groups.items():
            for member in group['members']:
                member_graph.add_edge(member, group_name)

        node_graph = nx.DiGraph()
        for node in node_templates:
            node_graph.add_node(node['id'])
            for rel in node.get(constants.RELATIONSHIPS, []):
                if constants.CONTAINED_IN_REL_TYPE in rel['type_hierarchy']:
                    node_graph.add_edge(node['id'], rel['target_id'])

        self._validate_no_group_cycles(member_graph)
        self._validate_members_in_one_group_only(member_graph)
        self._validate_no_contained_in_shares_group_with_non_contained_in(
            member_graph, node_graph)
        self._remove_contained_nodes_from_scaling_groups(
            scaling_groups, member_graph, node_graph)

    @staticmethod
    def _validate_no_group_cycles(member_graph):
        # verify no group cycles (i.e. group A in group B and vice versa)
        group_cycles = nx.recursive_simple_cycles(member_graph)
        if group_cycles:
            raise exceptions.DSLParsingLogicException(
                exceptions.ERROR_GROUP_CYCLE,
                'Illegal group cycles found: {0}'.format(group_cycles))

    @staticmethod
    def _validate_members_in_one_group_only(member_graph):
        # verify all group members are part of exactly one group
        for member in member_graph:
            successors = member_graph.successors(member)
            if len(successors) > 1:
                raise exceptions.DSLParsingLogicException(
                    exceptions.ERROR_MULTIPLE_GROUPS,
                    "Nodes and groups cannot be members in multiple groups, "
                    "but member '{0}' belongs to the following multiple "
                    "groups: {1}".format(member, successors))

    @staticmethod
    def _validate_no_contained_in_shares_group_with_non_contained_in(
            member_graph, node_graph):
        # for each node a that is (recursively) contained in some node b,
        # verify that a does not (recursively) belong to a group containing
        # nodes that are not themselves (recursively) contained in node b,
        # unless node b is in that group as well

        # first extract all group members (recursively)
        group_members = {}
        for member in member_graph:
            if member in node_graph:
                continue
            group_members[member] = nx.ancestors(member_graph, member)

        # next, remove members that are groups themselves
        group_names = set(group_members.keys())
        group_node_members = {}
        for group_name, members in group_members.items():
            group_node_members[group_name] = members - group_names

        # now, for each group, for each node pair, verify both nodes in pair
        # are contained (recursively) in some third node or one of them is
        # contained in some third node that has this property
        containing_nodes = {}

        def check_pair(pair_key):
            node_a, node_b = pair_key
            if node_a == node_b:
                return True
            if node_a not in containing_nodes:
                containing_nodes[node_a] = nx.topological_sort(
                    node_graph, nbunch=[node_a])
            if node_b not in containing_nodes:
                containing_nodes[node_b] = nx.topological_sort(
                    node_graph, nbunch=[node_b])
            a_containing_nodes = set(containing_nodes[node_a])
            a_containing_nodes.remove(node_a)
            b_containing_nodes = set(containing_nodes[node_b])
            b_containing_nodes.remove(node_b)
            if not (a_containing_nodes or b_containing_nodes):
                return True
            if node_b in a_containing_nodes:
                return True
            if node_a in b_containing_nodes:
                return True
            if a_containing_nodes & b_containing_nodes:
                return True
            return False

        checked_pairs = set()
        ok_pairs = set()
        problematic_pairs = set()
        for node_members in group_node_members.values():
            for node_a, node_b in itertools.product(node_members, repeat=2):
                pair_key = tuple(sorted([node_a, node_b]))
                if pair_key in checked_pairs:
                    continue
                if check_pair(pair_key):
                    ok_pairs.add(pair_key)
                else:
                    problematic_pairs.add(pair_key)
                checked_pairs.add(pair_key)

        def check_problematic_pair(node_a, node_b):
            for node_a_containing_node in containing_nodes[node_a]:
                pair_key = tuple(sorted([node_b, node_a_containing_node]))
                if pair_key in ok_pairs:
                    return True
            for node_b_containing_node in containing_nodes[node_b]:
                pair_key = tuple(sorted([node_a, node_b_containing_node]))
                if pair_key in ok_pairs:
                    return True
            return False

        for node_a, node_b in problematic_pairs:
            if check_problematic_pair(node_a, node_b):
                ok_pairs.add((node_a, node_b))
            else:
                raise exceptions.DSLParsingLogicException(
                    exceptions.ERROR_NON_CONTAINED_GROUP_MEMBERS,
                    "Nodes '{0}' and '{1}' belong to some shared group but "
                    "they are not contained in any shared node, nor is any "
                    "ancestor node of theirs.".format(node_a, node_b))

    @staticmethod
    def _remove_contained_nodes_from_scaling_groups(
            scaling_groups, member_graph, node_graph):
        # for each node, if the node shares a group (recursively) with a
        # node that contains it (recursively), remove the offending member
        # from the relevant group:
        # if the contained node and its containing node are in the same
        # group, remove the contained node; otherwise, remove the group
        # closest to the containing node
        for member in member_graph:
            if member not in node_graph:
                continue
            containing_groups = nx.topological_sort(member_graph,
                                                    nbunch=[member])
            containing_nodes = nx.topological_sort(node_graph, nbunch=[member])
            for node in containing_nodes:
                if node == member:
                    continue
                if node not in member_graph:
                    continue

                containing_node_groups = nx.topological_sort(member_graph,
                                                             nbunch=[node])
                containing_node_groups_set = set(containing_node_groups)

                shared_groups = (set(containing_groups) &
                                 containing_node_groups_set)
                if not shared_groups:
                    continue

                minimal_containing_group = nx.topological_sort(
                    member_graph, nbunch=shared_groups)[0]
                direct_member_group = member_graph.successors(member)[0]
                members = scaling_groups[minimal_containing_group]['members']
                if direct_member_group == minimal_containing_group:
                    removed_member = member
                else:
                    for containing_group in reversed(containing_groups):
                        if containing_group not in containing_node_groups_set:
                            removed_member = containing_group
                            break
                    else:
                        raise RuntimeError('Illegal state')

                if removed_member in members:
                    members.remove(removed_member)
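
The member graph used throughout the validation above is a plain directed graph where an edge member -> group means "member belongs to group"; a minimal sketch of the cycle check, with hypothetical names:

import networkx as nx

# group_a is a member of group_b and vice versa, which is illegal.
member_graph = nx.DiGraph()
member_graph.add_edge('node_1', 'group_a')
member_graph.add_edge('group_a', 'group_b')
member_graph.add_edge('group_b', 'group_a')
cycles = nx.recursive_simple_cycles(member_graph)
# cycles contains ['group_a', 'group_b'] (node order may vary), which would
# trigger ERROR_GROUP_CYCLE in _validate_no_group_cycles above.
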
class NodeTemplate(Element):

    schema = {
        'type': NodeTemplateType,
        'instances': NodeTemplateInstances,
        'capabilities': NodeTemplateCapabilities,
        'interfaces': _operation.NodeTemplateInterfaces,
        'relationships': NodeTemplateRelationships,
        'properties': NodeTemplateProperties,
    }
    requires = {
        'inputs': [Requirement('resource_base', required=False)],
        'self': [
            Value('related_node_templates',
                  predicate=_node_template_related_nodes_predicate,
                  multiple_results=True)
        ],
        _plugins.Plugins: [Value('plugins')],
        _node_types.NodeType:
        [Value('node_type', predicate=_node_template_node_type_predicate)],
        _node_types.NodeTypes: ['host_types']
    }

    def parse(self, node_type, host_types, plugins, resource_base,
              related_node_templates):
        node = self.build_dict_result()
        node.update({
            'name': self.name,
            'id': self.name,
            constants.TYPE_HIERARCHY: node_type[constants.TYPE_HIERARCHY]
        })

        node[constants.INTERFACES] = interfaces_parser.\
            merge_node_type_and_node_template_interfaces(
                node_type_interfaces=node_type[constants.INTERFACES],
                node_template_interfaces=node[constants.INTERFACES])

        node['operations'] = _process_operations(
            partial_error_message="in node '{0}' of type '{1}'".format(
                node['id'], node['type']),
            interfaces=node[constants.INTERFACES],
            plugins=plugins,
            error_code=10,
            resource_base=resource_base)

        node_name_to_node = dict(
            (node['id'], node) for node in related_node_templates)
        _post_process_node_relationships(processed_node=node,
                                         node_name_to_node=node_name_to_node,
                                         plugins=plugins,
                                         resource_base=resource_base)

        contained_in = self.child(
            NodeTemplateRelationships).provided['contained_in']
        if self.child(NodeTemplateType).value in host_types:
            node['host_id'] = self.name
        elif contained_in:
            containing_node = [
                n for n in related_node_templates if n['name'] == contained_in
            ][0]
            if 'host_id' in containing_node:
                node['host_id'] = containing_node['host_id']

        return node
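
Finally, the host_id resolution at the end of NodeTemplate.parse can be summarised with a small hypothetical example: a node whose type is a host type becomes its own host, otherwise it inherits host_id from the node it is contained in.

# Hypothetical nodes; type names are illustrative.
host_types = {'cloudify.nodes.Compute'}
vm = {'name': 'vm', 'id': 'vm', 'type': 'cloudify.nodes.Compute',
      'host_id': 'vm'}
app = {'name': 'app', 'id': 'app', 'type': 'cloudify.nodes.ApplicationModule'}
contained_in = 'vm'          # provided by the node's relationships element
related_node_templates = [vm]

if app['type'] in host_types:
    app['host_id'] = app['name']
elif contained_in:
    containing_node = [n for n in related_node_templates
                       if n['name'] == contained_in][0]
    if 'host_id' in containing_node:
        app['host_id'] = containing_node['host_id']
# app['host_id'] == 'vm'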