Example #1
def api_dataset_view(request, DataSet_UKCL, root_id, response_format):
    '''
        It returns the data of the instance with pk=root_id in the dataset (which is a view).
        If we are browsing a view there is not just one single root that we can explore
        but a list of instances that match the criteria; root_id tells us which one to browse.
        CAN BE CACHED
    '''
    response_format = response_format.upper()
    DataSet_UKCL_decoded = urllib.parse.unquote(DataSet_UKCL).replace("%2F","/")
    dataset = DataSet.retrieve_locally(DataSet_UKCL_decoded)
    actual_instance = ""
    actual_instance_json = ""
    # this dataset is a view; I shall use root_id to retrieve the actual instance
    module_name = dataset.dataset_structure.root_node.model_metadata.module
    dataset_uri = KsUrl(DataSet_UKCL_decoded)
    actual_instance_class = OrmWrapper.load_class(dataset_uri.netloc, module_name, dataset.dataset_structure.root_node.model_metadata.name) 
    actual_instance = actual_instance_class.objects.get(pk=root_id)
    
    if response_format == 'HTML' or response_format == 'BROWSE':
        actual_instance_json = '{' + actual_instance.serialize(dataset.dataset_structure.root_node, export_format='json', exported_instances = []) + '}'
    if response_format == 'JSON':
        ar = ApiResponse()
        ar.content = { "DataSet": dataset.export(export_format = 'DICT') }
        ar.status = ApiResponse.success
        return render(request, 'knowledge_server/export.json', {'json': ar.json()}, content_type="application/json")
    if response_format == 'XML':
        ar = ApiResponse()
        ar.status = ApiResponse.success
        ar.content = dataset.export(export_format = response_format)
        return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()}, content_type="application/xhtml+xml")
    if response_format == 'HTML' or response_format == 'BROWSE':
        this_ks = KnowledgeServer.this_knowledge_server()
        cont = RequestContext(request, {'dataset': dataset, 'actual_instance': actual_instance, 'actual_instance_json': actual_instance_json, 'sn': dataset.dataset_structure.root_node, 'DataSet_UKCL': DataSet_UKCL, 'this_ks':this_ks, 'this_ks_encoded_url':this_ks.url(True)})
        return render_to_response('knowledge_server/browse_dataset.html', context_instance=cont)
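
For context, a view with this signature is normally reached through the URLconf. A minimal sketch of the wiring, in the older Django URL style that matches the render_to_response/RequestContext calls above; the regex, module path and pattern name are assumptions, not taken from the project:

# urls.py -- hypothetical route for the api_dataset_view above;
# the named groups must match the view's keyword arguments.
from django.conf.urls import url

from knowledge_server import views  # assumed module path

urlpatterns = [
    url(r'^api/dataset_view/(?P<DataSet_UKCL>.+)/(?P<root_id>\d+)/(?P<response_format>\w+)/$',
        views.api_dataset_view,
        name='api_dataset_view'),
]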
Example #2
def forwards_func(apps, schema_editor):
    this_ks_d = KnowledgeServer.this_knowledge_server('default')

    dss_workflow = DataSetStructure(name='Workflow')
    dss_workflow.description = 'Workflow-Method-Attribute-PermissionStatement'
    dss_workflow.SetNotNullFields()
    dss_workflow.save()

    mm_workflow = Workflow().get_model_metadata(db_alias='default')
    mm_workflow_method = dss_workflow.create_model_metadata(name="WorkflowMethod",module="ap",name_field="name",description_field="description")
    mm_attribute = dss_workflow.create_model_metadata(name="Attribute",module="ap",name_field="",description_field="")
    mm_widget = dss_workflow.create_model_metadata(name="Widget",module="ap",name_field="",description_field="")
    mm_attribute_in_a_method = dss_workflow.create_model_metadata(name="AttributeInAMethod",module="ap",name_field="",description_field="")
    mm_permission_statement = dss_workflow.create_model_metadata(name="PermissionStatement",module="ap",name_field="",description_field="")
    mm_permission_holder = dss_workflow.create_model_metadata(name="PermissionHolder",module="ap",name_field="",description_field="")

    # It creates a DataSet for each of them; having the DataSetStructure makes it
    # possible to release and materialize the datasets with dangling references that will be
    # resolved once the dss is released and materialized. 
    KnowledgeServer.register_models([mm_workflow_method, mm_attribute_in_a_method, mm_permission_statement, mm_attribute, mm_widget, mm_permission_holder])

    # it creates the root node from the ModelMetadata provided
    dss_workflow.root_model_metadata(mm_workflow)
    # child nodes for two attributes/fields
    dss_workflow.root_node.children_nodes_for(["methods"], this_ks_d.netloc)
    dss_workflow.root_node.children_external_references_nodes_for( [ "type" ], this_ks_d.netloc )

    method_node = dss_workflow.root_node.child_nodes.all()[0]
    method_node.children_external_references_nodes_for(["initial_statuses", "final_status"], this_ks_d.netloc)
    method_node.children_nodes_for(["attributeinamethod_set", "permission"], this_ks_d.netloc)
    attribute_in_a_method_node = [cn for cn in method_node.child_nodes.all() if cn.attribute == "attributeinamethod_set"][0]
    attribute_in_a_method_node.children_external_references_nodes_for( [ "attribute", "custom_widget" ], this_ks_d.netloc )

    permission_node = [cn for cn in method_node.child_nodes.all() if cn.attribute == "permission"][0]
    permission_node.children_external_references_nodes_for( [ "permission_holder" ], this_ks_d.netloc )

    dss_workflow.save()
    
    dss_dss = DataSetStructure.get_from_name(DataSetStructure.dataset_structure_DSN)
    ds = DataSet(description='DataSet for data set structure "Workflow-Method-Attribute-PermissionStatement"', knowledge_server=this_ks_d, dataset_structure=dss_dss, root=dss_workflow, version_major=0, version_minor=1, version_patch=0, version_description="")
    ds.save()
    ds.set_released()
        

    # Application is now on its own dataset structure, so we don't create it here and just use the shallow one
    mm_application = dss_workflow.create_model_metadata(name='Application',module='ap',name_field="name",description_field="description")
    KnowledgeServer.register_models([mm_application])
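
A forwards_func like this is meant to be run by a Django data migration. A minimal sketch of that wrapping, assuming the usual migrations.RunPython pattern; the migration file name and dependency are placeholders:

# migrations/00XX_workflow_structure.py -- hypothetical module containing the
# forwards_func defined above.
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('ap', '0001_initial'),  # placeholder for the app's previous migration
    ]

    operations = [
        # RunPython calls forwards_func(apps, schema_editor) at migrate time
        migrations.RunPython(forwards_func, migrations.RunPython.noop),
    ]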
Example #3
def api_dataset_view(request):
    '''
        It returns the data of the instance with pk=root_id in the dataset (which is a view).
        If we are browsing a view there is not just one single root that we can explore
        but a list of instances that match the criteria; root_id tells us which one to browse.
        CAN BE CACHED
    '''
    DataSet_UKCL = request.GET['UKCL']
    root_id = request.GET['id']
    ar = ApiResponse(request=request)

    dataset = DataSet.retrieve_locally(DataSet_UKCL)
    actual_instance = ""
    actual_instance_json = ""
    # this dataset is a view; I shall use root_id to retrieve the actual instance
    module_name = dataset.dataset_structure.root_node.model_metadata.module
    dataset_uri = KsUrl(DataSet_UKCL)
    actual_instance_class = OrmWrapper.load_class(
        dataset_uri.netloc, module_name,
        dataset.dataset_structure.root_node.model_metadata.name)
    actual_instance = actual_instance_class.objects.get(pk=root_id)

    if ar.response_format == 'HTML' or ar.response_format == 'BROWSE':
        actual_instance_json = '{' + actual_instance.serialize(
            dataset.dataset_structure.root_node,
            export_format='json',
            exported_instances=[]) + '}'
    if ar.response_format == 'JSON':
        ar.content = {"DataSet": dataset.export(export_format='DICT')}
        ar.status = ApiResponse.success
        return render(request,
                      'knowledge_server/export.json', {'json': ar.json()},
                      content_type="application/json")
    if ar.response_format == 'XML':
        ar.status = ApiResponse.success
        ar.content = dataset.export(export_format=ar.response_format)
        return render(request,
                      'knowledge_server/export.xml', {'xml': ar.xml()},
                      content_type="application/xhtml+xml")
    if ar.response_format == 'HTML' or ar.response_format == 'BROWSE':
        this_ks = KnowledgeServer.this_knowledge_server()
        cont = RequestContext(
            request, {
                'dataset': dataset,
                'actual_instance': actual_instance,
                'actual_instance_json': actual_instance_json,
                'sn': dataset.dataset_structure.root_node,
                'DataSet_UKCL': DataSet_UKCL,
                'this_ks': this_ks,
                'this_ks_encoded_url': this_ks.url(True)
            })
        return render_to_response('knowledge_server/browse_dataset.html',
                                  context_instance=cont)
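
Because this variant reads UKCL and id from the query string, it can be exercised end to end with Django's test client. A minimal sketch, assuming a hypothetical route '/api/dataset_view/' pointing at this view and a placeholder UKCL; the response_format parameter name is also an assumption about how ApiResponse reads the requested format:

# Hypothetical call through Django's test client.
from django.test import Client

client = Client()
response = client.get(
    '/api/dataset_view/',                                          # assumed route
    {'UKCL': 'http://test.thekoa.org/knowledge_server/DataSet/1',  # placeholder UKCL
     'id': 1,
     'response_format': 'JSON'},                                   # assumed parameter name
)
print(response.status_code, response['Content-Type'])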
Example #4
def forwards_func(apps, schema_editor):
    org_ks = {
        "Organization": {
            "name": "A test Organization",
            "website": "http://new_org.example.com",
            "description": ""
        },
        "KnowledgeServer": {
            "name": "A test OKS.",
            "netloc": "test.thekoa.org",
            "description": "It has some datasets and structures",
            "html_home": "",
            "html_disclaimer": ""
        }
    }
    KnowledgeServer.create_this_ks(org_ks)
    this_ks_d = KnowledgeServer.this_knowledge_server('default')

    dssContinentState = DataSetStructure()
    dssContinentState.name = "Test Continent-SubContinent-State"
    dssContinentState.SetNotNullFields()
    dssContinentState.save()

    mmContinent = dssContinentState.create_model_metadata(
        "Continent", "test1", "name")
    mmSubContinent = dssContinentState.create_model_metadata(
        "SubContinent", "test1", "name")
    mmState = dssContinentState.create_model_metadata("State", "test1", "name")

    # It creates ModelMetadata and a DataSet for each of them; having the DataSetStructure makes it
    # possible to release and materialize the datasets with dangling references that will be
    # resolved once the dss is released and materialized.
    KnowledgeServer.register_models([mmContinent, mmSubContinent, mmState])

    # it creates the root node of the dataset structure
    dssContinentState.root_model_metadata(mmContinent)
    # and child nodes for two attributes/fields
    dssContinentState.root_node.children_nodes_for(
        ["subcontinent_set", "state_set"], this_ks_d.netloc)
    dssContinentState.save()

    dss_dss = DataSetStructure.get_from_name(
        DataSetStructure.dataset_structure_DSN)

    ds = DataSet(
        description=
        'DataSet for data set structure "Continent-SubContinent-State"',
        knowledge_server=this_ks_d,
        dataset_structure=dss_dss,
        root=dssContinentState,
        version_major=0,
        version_minor=1,
        version_patch=0)
    ds.save()
    ds.set_released()

    europe = Continent()
    europe.name = "Europe"
    europe.save()
    asia = Continent()
    asia.name = "Asia"
    asia.save()
    south_europe = SubContinent()
    south_europe.name = "South Europe"
    south_europe.continent = europe
    south_europe.save()
    central_europe = SubContinent()
    central_europe.name = "Central Europe"
    central_europe.continent = europe
    central_europe.save()
    italy = State()
    italy.name = "Italy"
    italy.sub_continent = south_europe
    italy.continent = europe
    italy.save()
    spain = State()
    spain.name = "Spain"
    spain.sub_continent = south_europe
    spain.continent = europe
    spain.save()
    germany = State()
    germany.name = "Germany"
    germany.sub_continent = central_europe
    germany.continent = europe
    germany.save()

    ds = DataSet(knowledge_server=this_ks_d,
                 dataset_structure=dssContinentState,
                 root=europe,
                 description="Europe",
                 version_major=0,
                 version_minor=1,
                 version_patch=0,
                 version_description="")
    ds.save()
    ds.set_released()
    ds = DataSet(knowledge_server=this_ks_d,
                 dataset_structure=dssContinentState,
                 root=asia,
                 description="Asia",
                 version_major=0,
                 version_minor=1,
                 version_patch=0,
                 version_description="")
    ds.save()
    ds.set_released()
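
Once this migration has run, the hierarchy it creates can be sanity-checked with plain ORM queries. A minimal sketch, assuming the Continent/SubContinent/State models live in test1.models (the module name used in create_model_metadata above):

# Quick check of the data created by forwards_func; only fields and reverse
# accessors already used above (name, continent, sub_continent, subcontinent_set).
from test1.models import Continent, State

europe = Continent.objects.get(name="Europe")
print(europe.subcontinent_set.count())                 # expected: 2 (South and Central Europe)

italy = State.objects.get(name="Italy")
print(italy.sub_continent.name, italy.continent.name)  # "South Europe" "Europe"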
Example #5
def forwards_func(apps, schema_editor):
    this_ks_d = KnowledgeServer.this_knowledge_server('default')

    dss_workflow = DataSetStructure(name='Workflow')
    dss_workflow.description = 'Workflow-Method-Attribute-PermissionStatement'
    dss_workflow.SetNotNullFields()
    dss_workflow.save()

    mm_workflow = Workflow().get_model_metadata(db_alias='default')
    models_for_dss_workflow = [
        {
            "name": "Attribute",
            "module": "ap",
            "name_field": "",
            "description_field": ""
        },
        {
            "name": "Widget",
            "module": "ap",
            "name_field": "",
            "description_field": ""
        },
        {
            "name": "AttributeInAMethod",
            "module": "ap",
            "name_field": "",
            "description_field": ""
        },
        {
            "name": "PermissionStatement",
            "module": "ap",
            "name_field": "",
            "description_field": ""
        },
        {
            "name": "PermissionHolder",
            "module": "ap",
            "name_field": "",
            "description_field": ""
        },
    ]
    mm_attribute, mm_widget, mm_attribute_in_a_method, mm_permission_statement, mm_permission_holder = dss_workflow.create_many_model_metadata(
        models_for_dss_workflow)

    mm_workflow_method = dss_workflow.create_model_metadata(
        name="WorkflowMethod", module="ap")
    # check which DSS these belong to
    mm_attribute_type = dss_workflow.create_model_metadata(
        name="AttributeType", module="ap")
    mm_attribute_group = dss_workflow.create_model_metadata(
        name="AttributeGroup", module="ap")
    # WorkflowsMethods is a many-to-many "through" model; does it need to be done?
    # Still to be assigned to a DSS (see the diagram on the sheet):
    #   WorkflowTransition
    #   Application
    #   ModelMetadataSearch
    #   ApplicationStructureNodeSearch
    #   AttributeInASearch
    #   KSUser
    #   KSRole
    #   KSGroup

    # It creates a DataSet for each of them; having the DataSetStructure makes it
    # possible to release and materialize the datasets with dangling references that will be
    # resolved once the dss is released and materialized.
    KnowledgeServer.register_models([
        mm_workflow_method, mm_attribute_in_a_method, mm_permission_statement,
        mm_attribute, mm_attribute_type, mm_attribute_group, mm_widget,
        mm_permission_holder
    ])

    # it creates the root node from the ModelMetadata provided
    dss_workflow.root_model_metadata(mm_workflow)
    # child nodes for two attributes/fields
    dss_workflow.root_node.children_nodes_for(["methods"], this_ks_d.netloc)
    dss_workflow.root_node.children_external_references_nodes_for(
        ["type"], this_ks_d.netloc)

    method_node = dss_workflow.root_node.child_nodes.all()[0]
    method_node.children_external_references_nodes_for(
        ["initial_statuses", "final_status"], this_ks_d.netloc)
    method_node.children_nodes_for(["attributeinamethod_set", "permission"],
                                   this_ks_d.netloc)
    attribute_in_a_method_node = [
        cn for cn in method_node.child_nodes.all()
        if cn.attribute == "attributeinamethod_set"
    ][0]
    attribute_in_a_method_node.children_external_references_nodes_for(
        ["attribute", "custom_widget"], this_ks_d.netloc)

    permission_node = [
        cn for cn in method_node.child_nodes.all()
        if cn.attribute == "permission"
    ][0]
    permission_node.children_external_references_nodes_for(
        ["permission_holder"], this_ks_d.netloc)

    dss_workflow.save()

    dss_dss = DataSetStructure.get_from_name(
        DataSetStructure.dataset_structure_DSN)
    ds = DataSet(
        description=
        'DataSet for data set structure "Workflow-Method-Attribute-PermissionStatement"',
        knowledge_server=this_ks_d,
        dataset_structure=dss_dss,
        root=dss_workflow,
        version_major=0,
        version_minor=1,
        version_patch=0,
        version_description="")
    ds.save()
    ds.set_released()

    # Application is now on its own dataset structure, so we don't create it here and just use the shallow one
    mm_application = dss_workflow.create_model_metadata(
        name='Application',
        module='ap',
        name_field="name",
        description_field="description")
    KnowledgeServer.register_models([mm_application])
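
After the structure is assembled, the node tree built by root_model_metadata, children_nodes_for and children_external_references_nodes_for can be inspected directly. A minimal sketch using only attributes already exercised above (root_node, child_nodes, attribute, model_metadata); the printed layout is purely illustrative:

# Walk two levels of the Workflow structure built above.
root = dss_workflow.root_node
print(root.model_metadata.name)               # Workflow
for child in root.child_nodes.all():          # e.g. the "methods" node
    print("  " + child.attribute)
    for grandchild in child.child_nodes.all():
        print("    " + grandchild.attribute)  # e.g. "attributeinamethod_set", "permission"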
Example #6
def api_dataset_info(request):
    '''
        #52 
        
        Parameters:
        * response_format { 'XML' | 'JSON' | 'HTML' = 'BROWSE' }
        * DataSet_UKCL: UKCL of the DataSet 
        
        Implementation:
        it fetches the DataSet, then the list of all DataSets that share the same root
        it returns DataSet.export(response_format) and, for each one in the above list:
            the UKCL of the DataSet
            the version status {working | released | obsolete}
            the version number (e.g. 0.1.0)
            the version date
            the version description
            other version metadata
        CAN BE CACHED
    '''
    DataSet_UKCL = request.GET['UKCL']
    ar = ApiResponse(request=request)
    DataSet_UKCL_unquoted = urllib.parse.unquote(DataSet_UKCL).replace(
        "%2F", "/")
    dataset = DataSet.retrieve_locally(DataSet_UKCL_unquoted)
    all_versions = DataSet.objects.filter(first_version=dataset.first_version)
    all_versions_serialized = ""
    all_versions_list = []
    if ar.response_format != 'HTML' and ar.response_format != 'BROWSE':
        for v in all_versions:
            if ar.response_format == 'JSON':
                # note I am using DICT as the export_format so that I can merge the dicts (.update) and then convert them to json
                all_versions_list.append(
                    v.export(export_format='DICT',
                             force_external_reference=True))
            else:
                all_versions_serialized += v.export(
                    export_format=ar.response_format,
                    force_external_reference=True)
    if ar.response_format == 'XML':
        ar.status = ApiResponse.success
        ar.content = "<DataSet>" + dataset.export(
            export_format=ar.response_format, force_external_reference=True
        ) + "</DataSet><Versions>" + all_versions_serialized + "</Versions>"
        return render(request,
                      'knowledge_server/export.xml', {'xml': ar.xml()},
                      content_type="application/xhtml+xml")
    if ar.response_format == 'JSON':
        ar.content = {
            "DataSet":
            dataset.export(export_format="DICT",
                           force_external_reference=True),
            "Versions":
            all_versions_list
        }
        ar.status = ApiResponse.success
        return HttpResponse(ar.json(), content_type="application/json")


    #     return render(request, 'knowledge_server/export.json', {'json': ar.json()}, content_type="application/json")
    if ar.response_format == 'HTML' or ar.response_format == 'BROWSE':
        if dataset.dataset_structure.is_a_view:
            instances = dataset.get_instances()
        else:
            instances = []
            instances.append(dataset.root)
        all_versions_with_instances = []
        for v in all_versions:
            if v.UKCL != dataset.UKCL:
                version_with_instance = {}
                version_with_instance['dataset'] = v
                version_with_instance['root'] = []
                # views have no version by themselves; only their components have and they can be different
                # so if we are here we are not in a view hence there is just one instance:
                #         I get root and not .get_instances()
                version_with_instance['root'].append(v.root)
                all_versions_with_instances.append(version_with_instance)
        this_ks = KnowledgeServer.this_knowledge_server()
        cont = RequestContext(
            request, {
                'DataSet_UKCL': DataSet_UKCL,
                'dataset': dataset,
                'all_versions_with_instances': all_versions_with_instances,
                'ks': dataset.knowledge_server,
                'instances': instances,
                'this_ks': this_ks,
                'this_ks_encoded_url': this_ks.url(True)
            })
        return render_to_response('knowledge_server/api_dataset_info.html',
                                  context_instance=cont)
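
The view unquotes the UKCL it receives, so callers are expected to percent-encode it when building the query string. A minimal sketch of constructing such a request URL; the host, path and sample UKCL are placeholders:

# Build a query string for api_dataset_info; urlencode() percent-encodes the
# UKCL that the view later unquotes.
from urllib.parse import urlencode

dataset_ukcl = "http://test.thekoa.org/knowledge_server/DataSet/1"  # placeholder UKCL
url = "http://test.thekoa.org/api/dataset_info/?" + urlencode({"UKCL": dataset_ukcl})
print(url)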
Example #7
def forwards_func(apps, schema_editor):
    org_ks={
      "Organization": {"name": "A test Organization", "website": "http://new_org.example.com", "description": ""}, 
      "KnowledgeServer": {"name": "A test OKS.", "netloc": "test.thekoa.org", "description": "It has some datasets and structures", "html_home": "", "html_disclaimer": ""}
     }
    KnowledgeServer.create_this_ks(org_ks)
    this_ks_d = KnowledgeServer.this_knowledge_server('default')
    
    dssContinentState=DataSetStructure()
    dssContinentState.name="Test Continent-SubContinent-State"
    dssContinentState.SetNotNullFields()
    dssContinentState.save()

    mmContinent=dssContinentState.create_model_metadata(name="Continent",module="test1",name_field="name",description_field="")
    mmSubContinent=dssContinentState.create_model_metadata(name="SubContinent",module="test1",name_field="name",description_field="")
    mmState=dssContinentState.create_model_metadata(name="State",module="test1",name_field="name",description_field="")
    
    # It creates ModelMetadata and a DataSet for each of them; having the DataSetStructure makes it
    # possible to release and materialize the datasets with dangling references that will be
    # resolved once the dss is released and materialized. 
    KnowledgeServer.register_models([mmContinent, mmSubContinent, mmState])
     
    # it creates the root node from the ModelMetadata provided
    dssContinentState.root_model_metadata(mmContinent)
    # child nodes for two attributes/fields
    dssContinentState.root_node.children_for(["subcontinent_set", "state_set"], this_ks_d.netloc)
    dssContinentState.save()
    
    dss_dss = DataSetStructure.get_from_name(DataSetStructure.dataset_structure_DSN)
    
    ds = DataSet(description='DataSet for data set structure "Continent-SubContinent-State"', knowledge_server=this_ks_d, dataset_structure=dss_dss, root=dssContinentState, version_major=0, version_minor=1, version_patch=0, version_description="")
    ds.save()
    ds.set_released()
        
    europe = Continent();europe.name="Europe";europe.save()
    asia = Continent();asia.name="Asia";asia.save()
    south_europe=SubContinent();south_europe.name="South Europe";south_europe.continent=europe;south_europe.save()
    central_europe=SubContinent();central_europe.name="Central Europe";central_europe.continent=europe;central_europe.save()
    italy=State();italy.name="Italy";italy.sub_continent=south_europe;italy.continent=europe;italy.save()
    spain=State();spain.name="Spain";spain.sub_continent=south_europe;spain.continent=europe;spain.save()
    germany=State();germany.name="Germany";germany.sub_continent=central_europe;germany.continent=europe;germany.save()
    
    ds = DataSet(knowledge_server=this_ks_d,dataset_structure=dssContinentState,root=europe,
                 description="Europe",version_major=0,version_minor=1,version_patch=0,version_description="")
    ds.save()
    ds.set_released()
    ds = DataSet(knowledge_server=this_ks_d,dataset_structure=dssContinentState,root=asia,
                 description="Asia",version_major=0,version_minor=1,version_patch=0,version_description="")
    ds.save()
    ds.set_released()
Example #8
def api_dataset_info(request, DataSet_UKCL, response_format):
    '''
        #52 
        
        Parameters:
        * response_format { 'XML' | 'JSON' | 'HTML' = 'BROWSE' }
        * DataSet_UKCL: UKCL of the DataSet 
        
        Implementation:
        it fetches the DataSet, then the list of all DataSets that share the same root
        it returns DataSet.export(response_format) and, for each one in the above list:
            the UKCL of the DataSet
            the version status {working | released | obsolete}
            the version number (e.g. 0.1.0)
            the version date
            the version description
            other version metadata
        CAN BE CACHED
    '''
    response_format = response_format.upper()
    DataSet_UKCL_unquoted = urllib.parse.unquote(DataSet_UKCL).replace("%2F","/")
    dataset = DataSet.retrieve_locally(DataSet_UKCL_unquoted)
    all_versions = DataSet.objects.filter(first_version = dataset.first_version)
    all_versions_serialized = ""
    all_versions_list = []
    if response_format != 'HTML' and response_format != 'BROWSE':
        for v in all_versions:
            if response_format == 'JSON':
                # note I am using DICT as the export_format so that I can merge the dicts (.update) and then convert them to json
                all_versions_list.append(v.export(export_format = 'DICT', force_external_reference=True))
            else:
                all_versions_serialized += v.export(export_format = response_format, force_external_reference=True)
    if response_format == 'XML':
        ar = ApiResponse()
        ar.status = ApiResponse.success
        ar.content = "<DataSet>" + dataset.export(export_format = response_format, force_external_reference=True) + "</DataSet><Versions>" + all_versions_serialized + "</Versions>"
        return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()}, content_type="application/xhtml+xml")
    if response_format == 'JSON':
        ar = ApiResponse()
        ar.content = { "DataSet": dataset.export(export_format = "DICT", force_external_reference=True), "Versions": all_versions_list }
        ar.status = ApiResponse.success
        return HttpResponse(ar.json(), content_type = "application/json") 
    #     return render(request, 'knowledge_server/export.json', {'json': ar.json()}, content_type="application/json")
    if response_format == 'HTML' or response_format == 'BROWSE':
        if dataset.dataset_structure.is_a_view:
            instances = dataset.get_instances()
        else:
            instances = []
            instances.append(dataset.root)
        all_versions_with_instances = []
        for v in all_versions:
            if v.UKCL != dataset.UKCL:
                version_with_instance = {}
                version_with_instance['dataset'] = v
                version_with_instance['root'] = []
                # views have no version by themselves; only their components have and they can be different
                # so if we are here we are not in a view hence there is just one instance: 
                #         I get root and not .get_instances()
                version_with_instance['root'].append(v.root)
                all_versions_with_instances.append(version_with_instance)
        this_ks = KnowledgeServer.this_knowledge_server()
        cont = RequestContext(request, {'DataSet_UKCL': DataSet_UKCL, 'dataset': dataset, 'all_versions_with_instances': all_versions_with_instances, 'ks': dataset.knowledge_server, 'instances': instances, 'this_ks':this_ks, 'this_ks_encoded_url':this_ks.url(True) })
        return render_to_response('knowledge_server/api_dataset_info.html', context_instance=cont)