def api_dataset_structure_code(request):
    '''
        This API is needed only by another OKS and is not meant to be public.
        Its goal is to provide another OKS with the information needed to generate
        and migrate the models within a structure. TODO: models that are external
        references are not included.
        The information is provided as the code of the classes, in a dictionary
        that groups them by APP/MODULE.
        CAN BE CACHED
    '''
    DataSetStructure_UKCL = request.GET['UKCL']
    dss = DataSetStructure.retrieve_locally(
        urllib.parse.unquote(DataSetStructure_UKCL).replace("%2F", "/"))
    try:
        classes_code = dss.classes_code()
        return render(
            request,
            'knowledge_server/export.json', {
                'json': ApiResponse(ApiResponse.success, "",
                                    classes_code).json()
            },
            content_type="application/json")
    except Exception as ex:
        return render(
            request,
            'knowledge_server/export.json',
            {'json': ApiResponse(ApiResponse.failure, str(ex)).json()},
            content_type="application/json")
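
# A minimal client-side sketch (not part of this module) of how another OKS
# might consume the endpoint above. The route ("/oks/api/dataset_structure_code/"),
# the helper name and the payload shape (a "content" key holding the per-APP/MODULE
# class code) are assumptions for illustration; the real route lives in the
# project's urls.py and the payload shape is whatever ApiResponse.json() produces.
import json
import urllib.parse
import urllib.request


def fetch_classes_code(oks_base_url, dataset_structure_ukcl):
    # The UKCL is itself a URL, so percent-encode it before passing it as the
    # UKCL query-string parameter read by request.GET above.
    encoded = urllib.parse.quote(dataset_structure_ukcl, safe="")
    url = oks_base_url + "/oks/api/dataset_structure_code/?UKCL=" + encoded
    with urllib.request.urlopen(url) as response:
        payload = json.loads(response.read().decode("utf-8"))
    return payload["content"]
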
def api_datasets(request, DataSetStructure_UKCL=None, response_format=None):
    '''
        http://redmine.davide.galletti.name/issues/64
        Returns all the released datasets of a given structure/type.

        Parameters:
        * response_format: { 'XML' | 'JSON' }
        * DataSetStructure_UKCL: the UKCL of the DataSetStructure, URL-encoded

        Implementation:
        fetches the structure from the DB and looks for all the datasets
        with that structure; if it is not a view, only those that are released.
        CAN BE CACHED
    '''
    if not DataSetStructure_UKCL:
        DataSetStructure_UKCL = request.GET['UKCL']
    if response_format:
        ar = ApiResponse(format=response_format)
    else:
        # if not specified I get it from the request object
        ar = ApiResponse(request=request)
    dss = DataSetStructure.retrieve_locally(
        urllib.parse.unquote(DataSetStructure_UKCL).replace("%2F", "/"))

    # Now I need to get all the released DataSet of the DataSetStructure passed as a parameter
    if dss.is_a_view:
        # version_released is not relevant for views
        released_dataset = DataSet.objects.filter(dataset_structure=dss)
    else:
        released_dataset = DataSet.objects.filter(dataset_structure=dss,
                                                  version_released=True)
    serialized = ""
    comma = ""
    dataset_list = []
    for dataset in released_dataset:
        if ar.response_format == 'JSON':
            dataset_list.append(
                dataset.export(export_format="DICT",
                               force_external_reference=True))
        else:
            serialized += dataset.export(export_format=ar.response_format,
                                         force_external_reference=True)
    if ar.response_format == 'XML':
        ar.status = ApiResponse.success
        ar.content = "<DataSets>" + serialized + "</DataSets>"
        return render(request,
                      'knowledge_server/export.xml', {'xml': ar.xml()},
                      content_type="application/xhtml+xml")
    if ar.response_format == 'JSON':
        ar.content = {"DataSets": dataset_list}
        ar.status = ApiResponse.success
        return render(request,
                      'knowledge_server/export.json', {'json': ar.json()},
                      content_type="application/json")
def api_dataset_structure_code(request, DataSetStructure_UKCL):
    '''
        This API is needed only by another OKS and is not meant to be public.
        Its goal is to provide another OKS with the information needed to generate
        and migrate the models within a structure. TODO: models that are external
        references are not included.
        The information is provided as the code of the classes, in a dictionary
        that groups them by APP/MODULE.
        CAN BE CACHED
    '''
    dss = DataSetStructure.retrieve_locally(urllib.parse.unquote(DataSetStructure_UKCL).replace("%2F","/"))
    try:
        classes_code = dss.classes_code()
        return render(request, 'knowledge_server/export.json', {'json': ApiResponse(ApiResponse.success, "", classes_code).json()}, content_type="application/json")
    except Exception as ex:
        return render(request, 'knowledge_server/export.json', {'json': ApiResponse(ApiResponse.failure, str(ex)).json()}, content_type="application/json")
def api_datasets(request, DataSetStructure_UKCL, response_format):
    '''
        http://redmine.davide.galletti.name/issues/64
        Returns all the released datasets of a given structure/type.

        Parameters:
        * response_format: { 'XML' | 'JSON' }
        * DataSetStructure_UKCL: the UKCL of the DataSetStructure, URL-encoded

        Implementation:
        fetches the structure from the DB and looks for all the datasets
        with that structure; if it is not a view, only those that are released.
        CAN BE CACHED
    '''
    ar = ApiResponse()
    response_format = response_format.upper()
    dss = DataSetStructure.retrieve_locally(urllib.parse.unquote(DataSetStructure_UKCL).replace("%2F","/"))
    
    # Now I need to get all the released DataSet of the DataSetStructure passed as a parameter
    if dss.is_a_view:
        # version_released is not relevant for views
        released_dataset = DataSet.objects.filter(dataset_structure=dss)
    else:
        released_dataset = DataSet.objects.filter(dataset_structure=dss, version_released=True)
    serialized = ""
    dataset_list = []
    for dataset in released_dataset:
        if response_format == 'JSON':
            dataset_list.append(dataset.export(export_format="DICT", force_external_reference=True))
        else:
            serialized += dataset.export(export_format=response_format, force_external_reference=True)
    if response_format == 'XML':
        ar.status = ApiResponse.success
        ar.content = "<DataSets>" + serialized + "</DataSets>"
        return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()}, content_type="application/xhtml+xml")
    if response_format == 'JSON':
        ar.content = {"DataSets": dataset_list}
        ar.status = ApiResponse.success
        return render(request, 'knowledge_server/export.json', {'json': ar.json()}, content_type="application/json")
def forwards_func(apps, schema_editor):
    org_ks = {
        "Organization": {
            "name": "A test Organization",
            "website": "http://new_org.example.com",
            "description": ""
        },
        "KnowledgeServer": {
            "name": "A test OKS.",
            "netloc": "test.thekoa.org",
            "description": "It has some datasets and structures",
            "html_home": "",
            "html_disclaimer": ""
        }
    }
    KnowledgeServer.create_this_ks(org_ks)
    this_ks_d = KnowledgeServer.this_knowledge_server('default')

    dssContinentState = DataSetStructure()
    dssContinentState.name = "Test Continent-SubContinent-State"
    dssContinentState.SetNotNullFields()
    dssContinentState.save()

    mmContinent = dssContinentState.create_model_metadata(
        "Continent", "test1", "name")
    mmSubContinent = dssContinentState.create_model_metadata(
        "SubContinent", "test1", "name")
    mmState = dssContinentState.create_model_metadata("State", "test1", "name")

    # It creates ModelMetadata and a DataSet for each of them; having the DataSetStructure makes it
    # possible to release and materialize the datasets with dangling references that will be
    # resolved once the dss is released and materialized.
    KnowledgeServer.register_models([mmContinent, mmSubContinent, mmState])

    # it creates the root node of the dataset structure
    dssContinentState.root_model_metadata(mmContinent)
    # and child nodes for two attributes/fields
    dssContinentState.root_node.children_nodes_for(
        ["subcontinent_set", "state_set"], this_ks_d.netloc)
    dssContinentState.save()

    dss_dss = DataSetStructure.get_from_name(
        DataSetStructure.dataset_structure_DSN)

    ds = DataSet(
        description=
        'DataSet for data set structure "Continent-SubContinent-State"',
        knowledge_server=this_ks_d,
        dataset_structure=dss_dss,
        root=dssContinentState,
        version_major=0,
        version_minor=1,
        version_patch=0)
    ds.save()
    ds.set_released()

    europe = Continent()
    europe.name = "Europe"
    europe.save()
    asia = Continent()
    asia.name = "Asia"
    asia.save()
    south_europe = SubContinent()
    south_europe.name = "South Europe"
    south_europe.continent = europe
    south_europe.save()
    central_europe = SubContinent()
    central_europe.name = "Central Europe"
    central_europe.continent = europe
    central_europe.save()
    italy = State()
    italy.name = "Italy"
    italy.sub_continent = south_europe
    italy.continent = europe
    italy.save()
    spain = State()
    spain.name = "Spain"
    spain.sub_continent = south_europe
    spain.continent = europe
    spain.save()
    germany = State()
    germany.name = "Germany"
    germany.sub_continent = central_europe
    germany.continent = europe
    germany.save()

    ds = DataSet(knowledge_server=this_ks_d,
                 dataset_structure=dssContinentState,
                 root=europe,
                 description="Europe",
                 version_major=0,
                 version_minor=1,
                 version_patch=0,
                 version_description="")
    ds.save()
    ds.set_released()
    ds = DataSet(knowledge_server=this_ks_d,
                 dataset_structure=dssContinentState,
                 root=asia,
                 description="Asia",
                 version_major=0,
                 version_minor=1,
                 version_patch=0,
                 version_description="")
    ds.save()
    ds.set_released()
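
# Illustrative helper (not part of the original migration): a hedged sketch of
# how the datasets created above could be serialized once released, reusing the
# export() call that api_datasets makes. The helper name is hypothetical; it
# assumes DataSet is importable in this module, as the migration above implies.
def _export_released_datasets(dss):
    # Serialize every released DataSet of the given structure as a dict,
    # exactly as api_datasets does when building its JSON response.
    return [
        ds.export(export_format="DICT", force_external_reference=True)
        for ds in DataSet.objects.filter(dataset_structure=dss, version_released=True)
    ]
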
def forwards_func(apps, schema_editor):
    this_ks_d = KnowledgeServer.this_knowledge_server('default')

    dss_workflow = DataSetStructure(name='Workflow')
    dss_workflow.description = 'Workflow-Method-Attribute-PermissionStatement'
    dss_workflow.SetNotNullFields()
    dss_workflow.save()

    mm_workflow = Workflow().get_model_metadata(db_alias='default')
    models_for_dss_workflow = [
        {
            "name": "Attribute",
            "module": "ap",
            "name_field": "",
            "description_field": ""
        },
        {
            "name": "Widget",
            "module": "ap",
            "name_field": "",
            "description_field": ""
        },
        {
            "name": "AttributeInAMethod",
            "module": "ap",
            "name_field": "",
            "description_field": ""
        },
        {
            "name": "PermissionStatement",
            "module": "ap",
            "name_field": "",
            "description_field": ""
        },
        {
            "name": "PermissionHolder",
            "module": "ap",
            "name_field": "",
            "description_field": ""
        },
    ]
    mm_attribute, mm_widget, mm_attribute_in_a_method, mm_permission_statement, mm_permission_holder = dss_workflow.create_many_model_metadata(
        models_for_dss_workflow)

    mm_workflow_method = dss_workflow.create_model_metadata(
        name="WorkflowMethod", module="ap")
    mm_attribute = dss_workflow.create_model_metadata(name="Attribute",
                                                      module="ap",
                                                      name_field="",
                                                      description_field="")
    mm_widget = dss_workflow.create_model_metadata(name="Widget",
                                                   module="ap",
                                                   name_field="",
                                                   description_field="")
    mm_attribute_in_a_method = dss_workflow.create_model_metadata(
        name="AttributeInAMethod",
        module="ap",
        name_field="",
        description_field="")
    mm_permission_statement = dss_workflow.create_model_metadata(
        name="PermissionStatement",
        module="ap",
        name_field="",
        description_field="")
    # check which DSS
    mm_attribute_type = dss_workflow.create_model_metadata(
        name="AttributeType", module="ap")
    mm_attribute_group = dss_workflow.create_model_metadata(
        name="AttributeGroup", module="ap")
    # WorkflowsMethods is a many-to-many "through" model; does it need to be handled?
    # Still to be assigned to a DSS (check which DSS, against the diagram on the sheet):
    # WorkflowTransition, Application, ModelMetadataSearch,
    # ApplicationStructureNodeSearch, AttributeInASearch, KSUser, KSRole, KSGroup

    # It creates a DataSet for each of them; having the DataSetStructure makes it
    # possible to release and materialize the datasets with dangling references that will be
    # resolved once the dss is released and materialized.
    KnowledgeServer.register_models([
        mm_workflow_method, mm_attribute_in_a_method, mm_permission_statement,
        mm_attribute, mm_attribute_type, mm_attribute_group, mm_widget,
        mm_permission_holder
    ])

    # it creates the root node from the ModelMetadata provided
    dss_workflow.root_model_metadata(mm_workflow)
    # child nodes for two attributes/fields
    dss_workflow.root_node.children_nodes_for(["methods"], this_ks_d.netloc)
    dss_workflow.root_node.children_external_references_nodes_for(
        ["type"], this_ks_d.netloc)

    method_node = dss_workflow.root_node.child_nodes.all()[0]
    method_node.children_external_references_nodes_for(
        ["initial_statuses", "final_status"], this_ks_d.netloc)
    method_node.children_nodes_for(["attributeinamethod_set", "permission"],
                                   this_ks_d.netloc)
    attribute_in_a_method_node = [
        cn for cn in method_node.child_nodes.all()
        if cn.attribute == "attributeinamethod_set"
    ][0]
    attribute_in_a_method_node.children_external_references_nodes_for(
        ["attribute", "custom_widget"], this_ks_d.netloc)

    permission_node = [
        cn for cn in method_node.child_nodes.all()
        if cn.attribute == "permission"
    ][0]
    permission_node.children_external_references_nodes_for(
        ["permission_holder"], this_ks_d.netloc)

    dss_workflow.save()

    dss_dss = DataSetStructure.get_from_name(
        DataSetStructure.dataset_structure_DSN)
    ds = DataSet(
        description=
        'DataSet for data set structure "Workflow-Method-Attribute-PermissionStatement"',
        knowledge_server=this_ks_d,
        dataset_structure=dss_dss,
        root=dss_workflow,
        version_major=0,
        version_minor=1,
        version_patch=0,
        version_description="")
    ds.save()
    ds.set_released()

    # Application now is on its own dataset structure so we don't create it and just use the shallow one
    mm_application = dss_workflow.create_model_metadata(
        name='Application',
        module='ap',
        name_field="name",
        description_field="description")
    KnowledgeServer.register_models([mm_application])
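
# Illustrative helper (not part of the original migration): a hedged sketch that
# navigates the structure just built, using only the child_nodes / attribute
# access already exercised above. The helper name is hypothetical.
def _find_child_node(parent_node, attribute_name):
    # e.g. _find_child_node(dss_workflow.root_node, "methods") would return the
    # node the code above fetches with child_nodes.all()[0].
    for child in parent_node.child_nodes.all():
        if child.attribute == attribute_name:
            return child
    return None
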
def debug(request):
    '''
    Created to debug code; it returns immediately, so everything after the
    first return is scratch code kept for reference.

    Args:
        request: the incoming HttpRequest; its UKCL GET parameter is echoed back.
    '''
    UKCL = request.GET["UKCL"]
    return HttpResponse("OK: " + UKCL)
    try:
        from django.core import management
        #         management.call_command('migrate', "--database=materialized", interactive=False)
        #         management.call_command('migrate', 'ap', interactive=False)
        #         management.call_command('migrate', interactive=False)
        management.call_command('migrate', 'ap', interactive=False)
        # does it work?  management.call_command('migrate', "knowledge_server 0003_initial_data --database=materialized", interactive=False)
        return HttpResponse("OK")

        import scrapy

        class DmozItem(scrapy.Item):
            title = scrapy.Field()
            link = scrapy.Field()
            desc = scrapy.Field()

        class DmozSpider(scrapy.Spider):
            name = "dmoz"
            allowed_domains = ["dmoz.org"]
            start_urls = [
                "http://www.dmoz.org/Computers/Programming/Languages/Python/",
            ]

            def parse(self, response):
                for href in response.css(
                        "ul.directory.dir-col > li > a::attr('href')"):
                    url = response.urljoin(href.extract())
                    yield scrapy.Request(url, callback=self.parse_dir_contents)

            def parse_dir_contents(self, response):
                for sel in response.xpath('//ul/li'):
                    item = DmozItem()
                    item['title'] = sel.xpath('a/text()').extract()
                    item['link'] = sel.xpath('a/@href').extract()
                    item['desc'] = sel.xpath('text()').extract()
                    yield item

        return HttpResponse('OK')

        ar = ApiResponse()
        ar.content = {"DataSet": "Versions"}
        ar.status = ApiResponse.success
        return HttpResponse(ar.json(), content_type="application/json")

        # TODO: UPDATE ON STACKOVERFLOW: http://stackoverflow.com/questions/8784400/clearing-specific-cache-in-django

        from licenses.models import License
        db_alias = 'default'
        ccbysa40 = License.objects.using(db_alias).get(
            short_name="CC-BY-SA-4.0")
        dssModelMetadataFields = DataSetStructure.get_from_name(
            DataSetStructure.model_metadata_DSN, db_alias)
        dssDataSetStructureStructureNode = DataSetStructure.get_from_name(
            DataSetStructure.dataset_structure_DSN, db_alias)
        dssOrganizationKS = DataSetStructure.get_from_name(
            DataSetStructure.organization_DSN, db_alias)
        for ds in DataSet.objects.using(db_alias).filter(
                dataset_structure=dssModelMetadataFields):
            ds.licenses.add(ccbysa40)
            ds.save()
        for ds in DataSet.objects.using(db_alias).filter(
                dataset_structure=dssDataSetStructureStructureNode):
            ds.licenses.add(ccbysa40)
            ds.save()
        for ds in DataSet.objects.using(db_alias).filter(
                dataset_structure=dssOrganizationKS):
            ds.licenses.add(ccbysa40)
            ds.save()
        db_alias = 'materialized'
        ccbysa40 = License.objects.using(db_alias).get(
            short_name="CC-BY-SA-4.0")
        dssModelMetadataFields = DataSetStructure.get_from_name(
            DataSetStructure.model_metadata_DSN, db_alias)
        dssDataSetStructureStructureNode = DataSetStructure.get_from_name(
            DataSetStructure.dataset_structure_DSN, db_alias)
        dssOrganizationKS = DataSetStructure.get_from_name(
            DataSetStructure.organization_DSN, db_alias)
        for ds in DataSet.objects.using(db_alias).filter(
                dataset_structure=dssModelMetadataFields):
            ds.licenses.add(ccbysa40)
            ds.save()
        for ds in DataSet.objects.using(db_alias).filter(
                dataset_structure=dssDataSetStructureStructureNode):
            ds.licenses.add(ccbysa40)
            ds.save()
        for ds in DataSet.objects.using(db_alias).filter(
                dataset_structure=dssOrganizationKS):
            ds.licenses.add(ccbysa40)
            ds.save()
        return HttpResponse("OK ")

        from django.core.cache import cache
        from django.utils.cache import get_cache_key, _generate_cache_header_key
        from django.utils.encoding import escape_uri_path
        from django.http import HttpRequest

        new_request = HttpRequest()
        new_request.path = 'root.beta.thekoa.org/oks/api/ks_info/JSON/'  ##this path works
        new_request.META['SERVER_PORT'] = request.META['SERVER_PORT']
        new_request.META['SERVER_NAME'] = request.META['SERVER_NAME']

        key = _generate_cache_header_key("", new_request)
        if cache.has_key(key):
            cache.delete(key)

        full_path = 'http://root.beta.thekoa.org/oks/api/datasets/http%253A%252F%252Froot.beta.thekoa.org%252Fknowledge_server%252FDataSetStructure%252F4/JSON/'
        import hashlib
        from django.utils.encoding import force_bytes, iri_to_uri
        from django.utils.cache import _i18n_cache_key_suffix
        # code from _generate_cache_header_key
        url = hashlib.md5(force_bytes(iri_to_uri(full_path)))
        cache_key = 'views.decorators.cache.cache_header.%s.%s' % (
            "", url.hexdigest())
        key = _i18n_cache_key_suffix(request, cache_key)
        if cache.has_key(key):
            cache.delete(key)

        return HttpResponse("OK ")
        #         d = DataSet.objects.get(pk=1)
        #         s = d.shallow_structure()
        #         rct = d.root_content_type
        #
        #
        #         for structure_child_node in s.root_node.child_nodes.all():
        #             mm = structure_child_node.sn_model_metadata(d)
        #             print(mm.name)

        dssContinentState = DataSetStructure()
        dssContinentState.name = "Test Continent-SubContinent-State"
        dssContinentState.SetNotNullFields()
        dssContinentState.save()

        return HttpResponse("OK ")
    except Exception as ex:
        logger.error("views.debug: " + str(ex))
        return HttpResponse(str(ex))
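
# A hedged sketch that packages the cache-invalidation steps exercised inside
# debug() into a reusable helper. It relies only on the calls already used
# there (_generate_cache_header_key and cache.delete); the helper name, and the
# idea of probing with a bare HttpRequest for an arbitrary path, are assumptions.
def _invalidate_cached_view(path, server_name, server_port, key_prefix=""):
    from django.core.cache import cache
    from django.http import HttpRequest
    from django.utils.cache import _generate_cache_header_key

    probe = HttpRequest()
    probe.path = path
    probe.META['SERVER_NAME'] = server_name
    probe.META['SERVER_PORT'] = server_port
    key = _generate_cache_header_key(key_prefix, probe)
    if key in cache:
        cache.delete(key)
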
def forwards_func(apps, schema_editor):
    org_ks = {
        "Organization": {"name": "A test Organization", "website": "http://new_org.example.com", "description": ""},
        "KnowledgeServer": {"name": "A test OKS.", "netloc": "test.thekoa.org", "description": "It has some datasets and structures", "html_home": "", "html_disclaimer": ""}
    }
    KnowledgeServer.create_this_ks(org_ks)
    this_ks_d = KnowledgeServer.this_knowledge_server('default')
    
    dssContinentState = DataSetStructure()
    dssContinentState.name = "Test Continent-SubContinent-State"
    dssContinentState.SetNotNullFields()
    dssContinentState.save()

    mmContinent = dssContinentState.create_model_metadata(name="Continent", module="test1", name_field="name", description_field="")
    mmSubContinent = dssContinentState.create_model_metadata(name="SubContinent", module="test1", name_field="name", description_field="")
    mmState = dssContinentState.create_model_metadata(name="State", module="test1", name_field="name", description_field="")
    
    # It creates ModelMetadata and a DataSet for each of them; having the DataSetStructure makes it
    # possible to release and materialize the datasets with dangling references that will be
    # resolved once the dss is released and materialized. 
    KnowledgeServer.register_models([mmContinent, mmSubContinent, mmState])
     
    # it creates the root node from the ModelMetadata provided
    dssContinentState.root_model_metadata(mmContinent)
    # child nodes for two attributes/fields
    dssContinentState.root_node.children_nodes_for(["subcontinent_set", "state_set"], this_ks_d.netloc)
    dssContinentState.save()
    
    dss_dss = DataSetStructure.get_from_name(DataSetStructure.dataset_structure_DSN)
    
    ds = DataSet(description='DataSet for data set structure "Continent-SubContinent-State"', knowledge_server=this_ks_d, dataset_structure=dss_dss, root=dssContinentState, version_major=0, version_minor=1, version_patch=0, version_description="")
    ds.save()
    ds.set_released()
        
    europe = Continent()
    europe.name = "Europe"
    europe.save()
    asia = Continent()
    asia.name = "Asia"
    asia.save()
    south_europe = SubContinent()
    south_europe.name = "South Europe"
    south_europe.continent = europe
    south_europe.save()
    central_europe = SubContinent()
    central_europe.name = "Central Europe"
    central_europe.continent = europe
    central_europe.save()
    italy = State()
    italy.name = "Italy"
    italy.sub_continent = south_europe
    italy.continent = europe
    italy.save()
    spain = State()
    spain.name = "Spain"
    spain.sub_continent = south_europe
    spain.continent = europe
    spain.save()
    germany = State()
    germany.name = "Germany"
    germany.sub_continent = central_europe
    germany.continent = europe
    germany.save()
    
    ds = DataSet(knowledge_server=this_ks_d, dataset_structure=dssContinentState, root=europe,
                 description="Europe", version_major=0, version_minor=1, version_patch=0, version_description="")
    ds.save()
    ds.set_released()
    ds = DataSet(knowledge_server=this_ks_d, dataset_structure=dssContinentState, root=asia,
                 description="Asia", version_major=0, version_minor=1, version_patch=0, version_description="")
    ds.save()
    ds.set_released()
def forwards_func(apps, schema_editor):
    this_ks_d = KnowledgeServer.this_knowledge_server('default')

    dss_workflow = DataSetStructure(name='Workflow')
    dss_workflow.description = 'Workflow-Method-Attribute-PermissionStatement'
    dss_workflow.SetNotNullFields()
    dss_workflow.save()

    mm_workflow = Workflow().get_model_metadata(db_alias='default')
    mm_workflow_method = dss_workflow.create_model_metadata(name="WorkflowMethod",module="ap",name_field="name",description_field="description")
    mm_attribute = dss_workflow.create_model_metadata(name="Attribute",module="ap",name_field="",description_field="")
    mm_widget = dss_workflow.create_model_metadata(name="Widget",module="ap",name_field="",description_field="")
    mm_attribute_in_a_method = dss_workflow.create_model_metadata(name="AttributeInAMethod",module="ap",name_field="",description_field="")
    mm_permission_statement = dss_workflow.create_model_metadata(name="PermissionStatement",module="ap",name_field="",description_field="")
    mm_permission_holder = dss_workflow.create_model_metadata(name="PermissionHolder",module="ap",name_field="",description_field="")

    # It creates a DataSet for each of them; having the DataSetStructure makes it
    # possible to release and materialize the datasets with dangling references that will be
    # resolved once the dss is released and materialized. 
    KnowledgeServer.register_models([mm_workflow_method, mm_attribute_in_a_method, mm_permission_statement, mm_attribute, mm_widget, mm_permission_holder])

    # it creates the root node from the ModelMetadata provided
    dss_workflow.root_model_metadata(mm_workflow)
    # child nodes for two attributes/fields
    dss_workflow.root_node.children_nodes_for(["methods"], this_ks_d.netloc)
    dss_workflow.root_node.children_external_references_nodes_for( [ "type" ], this_ks_d.netloc )

    method_node = dss_workflow.root_node.child_nodes.all()[0]
    method_node.children_external_references_nodes_for(["initial_statuses", "final_status"], this_ks_d.netloc)
    method_node.children_nodes_for(["attributeinamethod_set", "permission"], this_ks_d.netloc)
    attribute_in_a_method_node = [cn for cn in method_node.child_nodes.all() if cn.attribute == "attributeinamethod_set"][0]
    attribute_in_a_method_node.children_external_references_nodes_for(["attribute", "custom_widget"], this_ks_d.netloc)

    permission_node = [cn for cn in method_node.child_nodes.all() if cn.attribute == "permission"][0]
    permission_node.children_external_references_nodes_for(["permission_holder"], this_ks_d.netloc)

    dss_workflow.save()
    
    dss_dss = DataSetStructure.get_from_name(DataSetStructure.dataset_structure_DSN)
    ds = DataSet(description='DataSet for data set structure "Workflow-Method-Attribute-PermissionStatement"', knowledge_server=this_ks_d, dataset_structure=dss_dss, root=dss_workflow, version_major=0, version_minor=1, version_patch=0, version_description="")
    ds.save()
    ds.set_released()
        

    # Application now is on its own dataset structure so we don't create it and just use the shallow one
    mm_application = dss_workflow.create_model_metadata(name='Application',module='ap',name_field="name",description_field="description")
    KnowledgeServer.register_models([mm_application])