Code example #1
0
File: policytypes.py  Project: ditrit/workflows
def parse_policy_types(toscayaml, model_name):
    """Parse 'policy_types' out of a TOSCA yaml document and publish the
    internal representation of each policy type into the ditrit space.

    toscayaml  -- parsed TOSCA document (a dict is expected)
    model_name -- model name used to build the storage key
    """
    parsed = {}
    definitions = toscayaml.get('policy_types')

    if type(definitions) is dict:
        for name, body in definitions.items():
            # reuse any entry already present for this name
            entry = parsed.get(name, {})
            entry['name'] = name

            # parent chain; the first element is the direct ancestor
            entry['types'] = get_types(definitions, name)
            if entry['types']:
                entry['derived_from'] = entry['types'][0]
            else:
                print "ERROR : types list is empty for policy type '{}'".format(
                    name)

            # version of the policy type
            entry['version'] = str(Version(body.get('version')))

            # metadata must be a dict mapping names to strings, else ignored
            entry['metadata'] = {}
            meta = body.get('metadata')
            if isinstance(meta, dict) and all(
                    isinstance(v, basestring) for v in meta.values()):
                entry['metadata'] = meta

            # optional description
            descr = body.get('description')
            entry['description'] = descr if descr is not None else ""

            # property definitions
            entry['properties'] = get_property_definitions(
                body.get('properties'))

            # targets must be a list of strings, else ignored
            ## TODO: verify each member is a node type or a group type
            entry['targets'] = []
            targets = body.get('targets')
            if isinstance(targets, list) and all(
                    isinstance(t, basestring) for t in targets):
                entry['targets'] = targets

            # trigger definitions
            entry['triggers'] = get_trigger_definitions(body.get('triggers'))

            parsed[name] = entry

    # store in the ditrit space
    for name, value in parsed.items():
        linda_out('types/{}/policies/{}'.format(model_name, name), value)
Code example #2
0
File: grouptypes.py  Project: ditrit/workflows
def parse_group_types(toscayaml, model_name):
  """
      Create types internal representation
  """
  grp_types = {}
  grp_def = toscayaml.get('group_types')

  if type(grp_def) is dict:
    for type_name, type_def in grp_def.items():
      val = {}
      if type_name in grp_types:
        val = grp_types[type_name]
      val['name'] = type_name

      # get the list of parents
      val['types'] = get_types(grp_def, type_name)
      if len(val['types']) > 0:
        val['derived_from'] = val['types'][0]
      else:
        print "ERROR : types list is empty for group type '{}'".format(type_name)
    
      # get the version of the node type
      val['version'] = str(Version(type_def.get('version')))

      # get metadata
      val['metadata'] = {}
      metadata = type_def.get('metadata')
      if isinstance(metadata, dict) and all(map(lambda x: isinstance(x, basestring), metadata.values())):
        val['metadata'] = metadata

      # get descrption
      val['description'] = ""
      descr = type_def.get('description')
      if descr is not None:
        val['description'] = descr

      # get properties and attributes
      val['properties']   = get_property_definitions(type_def.get('properties'))

      # get requirements and capabilities
      val['requirements'] = get_requirement_definitions(type_def.get('requirements'))
      val['capabilities'] = get_capability_definitions(type_def.get('capabilities'))

      # get members 
      ## TODO: verify each member is a nodei type, a capability type or a group type
      val['members'] = []
      members = type_def.get('members')
      if isinstance(members, list) and all(map(lambda x: isinstance(x, basestring), members)):
        val['members'] = members

      # get interfaces
      val['interfaces'] = get_interface_definitions(type_def.get('interfaces'))

      grp_types[type_name] = val

  # store in the ditrit space
  for type_name, type_val in grp_types.items():
    linda_out('types/{}/groups/{}'.format(model_name, type_name), type_val)
Code example #3
0
def parse_data_types(toscayaml, model_name):
    """Parse 'data_types' out of a TOSCA yaml document and publish the
    internal representation of each data type into the ditrit space.

    Base types are always included; user-defined types overlay them.

    toscayaml  -- parsed TOSCA document (a dict is expected)
    model_name -- model name used to build the storage key
    """
    parsed = {}

    # start from a private copy of the base types, then overlay the
    # user-defined data types
    definitions = copy.deepcopy(basetypes)
    if 'data_types' in toscayaml.keys():
        definitions.update(toscayaml['data_types'])

    if type(definitions) is dict:
        for name, body in definitions.items():
            # reuse any entry already present for this name
            entry = parsed.get(name, {})
            entry['name'] = name

            # parent chain; the first element is the direct ancestor
            entry['types'] = get_types(definitions, name)
            if entry['types']:
                entry['derived_from'] = entry['types'][0]
            else:
                print "ERROR : types list is empty for data type '{}'".format(
                    name)

            # version of the data type
            entry['version'] = str(Version(body.get('version')))

            # metadata must be a dict mapping names to strings, else ignored
            entry['metadata'] = {}
            meta = body.get('metadata')
            if isinstance(meta, dict) and all(
                    isinstance(v, basestring) for v in meta.values()):
                entry['metadata'] = meta

            # optional description
            descr = body.get('description')
            entry['description'] = descr if descr is not None else ""

            # constraints, taken as-is
            ## TODO: verify constraint is ok with parse_constraint (should return None if not ok)
            entry['constraints'] = body.get('constraints')

            # property definitions
            entry['properties'] = get_property_definitions(
                body.get('properties'))

            parsed[name] = entry

    # store in the ditrit space
    for name, value in parsed.items():
        linda_out('types/{}/datatypes/{}'.format(model_name, name), value)
Code example #4
0
File: interfacetypes.py  Project: ditrit/workflows
def parse_interface_types(toscayaml, model_name):
    """Parse 'interface_types' out of a TOSCA yaml document and publish the
    internal representation of each interface type into the ditrit space.

    toscayaml  -- parsed TOSCA document (a dict is expected)
    model_name -- model name used to build the storage key
    """
    parsed = {}
    definitions = toscayaml.get('interface_types')
    if type(definitions) is dict:
        for name, body in definitions.items():
            # reuse any entry already present for this name
            entry = parsed.get(name, {})
            entry['name'] = name

            # parent chain; the first element is the direct ancestor
            entry['types'] = get_types(definitions, name)
            if entry['types']:
                entry['derived_from'] = entry['types'][0]
            else:
                print "ERROR : types list is empty for interface type '{}'".format(
                    name)

            # version of the interface type
            entry['version'] = str(Version(body.get('version')))

            # metadata must be a dict mapping names to strings, else ignored
            entry['metadata'] = {}
            meta = body.get('metadata')
            if isinstance(meta, dict) and all(
                    isinstance(v, basestring) for v in meta.values()):
                entry['metadata'] = meta

            # optional description
            descr = body.get('description')
            entry['description'] = descr if descr is not None else ""

            # interface inputs are property definitions
            entry['inputs'] = get_property_definitions(body.get('inputs'))

            # operation definitions come from the whole type body
            entry['operations'] = get_operation_definitions(body)

            parsed[name] = entry

    # store in the ditrit space
    for name, value in parsed.items():
        linda_out('types/{}/interfaces/{}'.format(model_name, name), value)
Code example #5
0
File: capabilitytypes.py  Project: ditrit/workflows
def parse_capability_types(toscayaml, model_name):
    """
      Create types internal representation
  """
    capa_types = {}
    # Part of the workflows defined in nodes have to be parsed before the part defined in relations.
    capa_def = toscayaml.get('capability_types')
    if type(capa_def) is dict:
        for type_name, type_def in capa_def.items():
            val = {}
            if type_name in capa_types:
                val = capa_types[type_name]
            val['name'] = type_name

            # get the list of parents
            val['types'] = get_types(capa_def, type_name)
            if len(val['types']) > 0:
                val['derived_from'] = val['types'][0]
            else:
                print "ERROR : types list is empty for node type '{}'".format(
                    type_name)

            # get the version of the node type
            val['version'] = str(Version(type_def.get('version')))

            # get descrption
            val['description'] = ""
            descr = type_def.get('description')
            if descr is not None:
                val['description'] = descr

            # get properties and attributes
            val['attributes'] = get_attribute_definitions(
                type_def.get('attributes'))
            val['properties'] = get_property_definitions(
                type_def.get('properties'))

            # get valid_source_types
            val['valid_source_types'] = []
            sources = type_def.get('valid_source_types')
            if isinstance(sources, list) and all(
                    map(lambda x: isinstance(x, basestring), sources)):
                val['valid_source_types'] = sources

            capa_types[type_name] = val

    # store in the ditrit space
    for type_name, type_val in capa_types.items():
        linda_out('types/{}/capabilities/{}'.format(model_name, type_name),
                  type_val)
Code example #6
0
def parse_artifact_types(toscayaml, model_name):
    """
      Create types internal representation
  """
    artifact_types = {}
    artifact_def = toscayaml.get('artifact_types')

    if type(artifact_def) is dict:
        for type_name, type_def in artifact_def.items():
            val = {}
            if type_name in artifact_types:
                val = artifact_types[type_name]
            val['name'] = type_name

            # get the list of parents
            val['types'] = get_types(artifact_def, type_name)
            if len(val['types']) > 0:
                val['derived_from'] = val['types'][0]
            else:
                print "ERROR : types list is empty for data type '{}'".format(
                    type_name)

            # get the version of the node type
            val['version'] = str(Version(type_def.get('version')))

            # get metadata
            val['metadata'] = {}
            metadata = type_def.get('metadata')
            if isinstance(metadata, dict) and all(
                    map(lambda x: isinstance(x, basestring),
                        metadata.values())):
                val['metadata'] = metadata

            # get descrption
            val['description'] = ""
            descr = type_def.get('description')
            if descr is not None:
                val['description'] = descr

            # get mime_type
            val['mime_type'] = type_def.get('mime_type')
            if not isinstance(val['mime_type'], basestring):
                val['mime_type'] = None

            # get file_exts
            exts = type_def.get('file_ext')
            if exts is None:
                exts = []

            if isinstance(exts, list) and all(
                    map(lambda x: isinstance(x, basestring), exts)):
                val['file_ext'] = exts
            else:
                print "Syntax Error : syntax error for file extensions '{}' for artifact type '{}'".format(
                    exts, type_name)
                val['file_ext'] = None

            # get properties
            val['properties'] = get_property_definitions(
                type_def.get('properties'))

            artifact_types[type_name] = val

    # store in the ditrit space
    for type_name, type_val in artifact_types.items():
        linda_out('types/{}/artifacts/{}'.format(model_name, type_name),
                  type_val)
Code example #7
0
File: nodetypes.py  Project: ditrit/workflows
def parse_node_types(toscayaml, model_name):
    """Create the internal representation of TOSCA node types and store
    each of them in the ditrit space.

    toscayaml  -- parsed TOSCA document (a dict is expected)
    model_name -- model name used to build the storage key
    """
    node_types = {}
    # Part of the workflows defined in nodes have to be parsed before the part defined in relations.
    nt_def = toscayaml.get('node_types')
    if type(nt_def) is dict:
        for type_name, type_def in nt_def.items():
            val = node_types.get(type_name, {})
            val['name'] = type_name

            # get the list of parents; the first one is the direct parent
            val['types'] = get_types(nt_def, type_name)
            if len(val['types']) > 0:
                val['derived_from'] = val['types'][0]
            else:
                print "ERROR : types list is empty for node type '{}'".format(
                    type_name)

            # for each declarative workflow defined in parents, get the
            # nearest parent declaring the workflow (parents are ordered
            # nearest-first, so the first declaration wins)
            types_for_workflows = {}
            for parent_type in val['types']:
                parent_def = nt_def.get(parent_type)
                if parent_def is None:
                    # fixed: report the missing type's NAME; the original
                    # formatted parent_def, which is always None here
                    print "ERROR: node type '{}' is referenced but not defined".format(
                        parent_type)
                else:
                    workflows = parent_def.get('workflows')
                    if isinstance(workflows, dict):
                        for workflow_name in workflows.keys():
                            # first-wins: keep the nearest parent
                            types_for_workflows.setdefault(
                                workflow_name, parent_type)
            val['types_for_workflows'] = types_for_workflows

            # is the node type derived from Compute ?
            val['isCompute'] = 'tosca.nodes.Compute' in val['types']

            # get the version of the node type
            val['version'] = str(Version(type_def.get('version')))

            # get metadata: must be a dict of strings, otherwise ignored
            val['metadata'] = {}
            metadata = type_def.get('metadata')
            if isinstance(metadata, dict) and all(
                    isinstance(v, basestring) for v in metadata.values()):
                val['metadata'] = metadata

            # get description
            val['description'] = ""
            descr = type_def.get('description')
            if descr is not None:
                val['description'] = descr

            # get properties and attributes
            val['attributes'] = get_attribute_definitions(
                type_def.get('attributes'))
            val['properties'] = get_property_definitions(
                type_def.get('properties'))

            # get requirements and capabilities
            val['requirements'] = get_requirement_definitions(
                type_def.get('requirements'))
            val['capabilities'] = get_capability_definitions(
                type_def.get('capabilities'))

            # get interfaces
            val['interfaces'] = get_interface_definitions(
                type_def.get('interfaces'))

            # get workflows
            val['workflows'] = type_def.get('workflows')

            node_types[type_name] = val

    # store in the ditrit space
    for type_name, type_val in node_types.items():
        linda_out('types/{}/nodes/{}'.format(model_name, type_name), type_val)