Example #1
def api_dataset_view(request, DataSet_UKCL, root_id, response_format):
    '''
        It returns the data of the instance with pk=root_id in the dataset (which is a view).
        When browsing a view there is not just one single root that we can explore,
        but a list of instances that match the criteria; root_id tells us which one to browse.
        CAN BE CACHED
    '''
    response_format = response_format.upper()
    DataSet_UKCL_decoded = urllib.parse.unquote(DataSet_UKCL).replace("%2F","/")
    dataset = DataSet.retrieve_locally(DataSet_UKCL_decoded)
    actual_instance = ""
    actual_instance_json = ""
    # this dataset is a view; I shall use root_id to retrieve the actual instance
    module_name = dataset.dataset_structure.root_node.model_metadata.module
    dataset_uri = KsUrl(DataSet_UKCL_decoded)
    actual_instance_class = OrmWrapper.load_class(dataset_uri.netloc, module_name, dataset.dataset_structure.root_node.model_metadata.name) 
    actual_instance = actual_instance_class.objects.get(pk=root_id)
    
    if response_format == 'JSON':
        ar = ApiResponse()
        ar.content = { "DataSet": dataset.export(export_format = 'DICT') }
        ar.status = ApiResponse.success
        return render(request, 'knowledge_server/export.json', {'json': ar.json()}, content_type="application/json")
    if response_format == 'XML':
        ar = ApiResponse()
        ar.status = ApiResponse.success
        ar.content = dataset.export(export_format = response_format)
        return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()}, content_type="application/xhtml+xml")
    if response_format == 'HTML' or response_format == 'BROWSE':
        actual_instance_json = '{' + actual_instance.serialize(dataset.dataset_structure.root_node, export_format='json', exported_instances = []) + '}'
        this_ks = KnowledgeServer.this_knowledge_server()
        cont = RequestContext(request, {'dataset': dataset, 'actual_instance': actual_instance, 'actual_instance_json': actual_instance_json, 'sn': dataset.dataset_structure.root_node, 'DataSet_UKCL': DataSet_UKCL, 'this_ks': this_ks, 'this_ks_encoded_url': this_ks.url(True)})
        return render_to_response('knowledge_server/browse_dataset.html', context_instance=cont)
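
The three parameters (DataSet_UKCL, root_id, response_format) arrive from the URL. A minimal sketch of a matching route, assuming the view lives in knowledge_server/views.py; the pattern itself is hypothetical and the project's real urls.py may differ:

from django.conf.urls import url
from knowledge_server import views

urlpatterns = [
    # hypothetical pattern: percent-encoded UKCL, numeric root_id, trailing response_format
    url(r'^api/dataset_view/(?P<DataSet_UKCL>.+)/(?P<root_id>[0-9]+)/(?P<response_format>[A-Za-z]+)/$',
        views.api_dataset_view, name='api_dataset_view'),
]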
Example #2
def api_catch_all(request, uri_instance):
    '''
        parameters:
            url: http://root.beta.thekoa.org/knowledge_server/Attribute/1
            url: http://root.beta.thekoa.org/knowledge_server/Attribute/1/xml
            url: http://root.beta.thekoa.org/knowledge_server/Attribute/1/json
        
        Implementation:
            I do something only if it is a UKCL in my database; otherwise I return a "not found" message.
            If there is a trailing string for the response_format I use it; otherwise I apply the default, xml.
            The trailing string can be "/xml", "/xml/", "/json" or "/json/", where each character can
            be either upper or lower case.
        CAN BE CACHED
    '''
    # I search for a response_format suffix; a UKCL itself has no trailing slash
    response_format = 'XML' #default
    if uri_instance[-1:] == "/":
        #I remove a trailing slash
        uri_instance = uri_instance[:-1]
    if uri_instance[-3:].lower() == "xml":
        uri_instance = uri_instance[:-4]
    if uri_instance[-4:].lower() == "json":
        response_format = 'JSON'
        uri_instance = uri_instance[:-5]
        
    try:
        split_path = uri_instance.split('/')
        if len(split_path) == 3:
            module_name = split_path[0]
            simple_entity_name = split_path[1]
            this_ks = KnowledgeServer.this_knowledge_server()
            actual_class = OrmWrapper.load_class(this_ks.netloc, module_name, simple_entity_name)
            instance = actual_class.retrieve_locally(this_ks.url() + "/" + uri_instance)
            if response_format == 'JSON':
                exported_json = '{ "Export" : { "ExportDateTime" : "' + str(datetime.now()) + '", ' + instance.serialize(export_format='JSON', exported_instances = []) + ' } }'
                return render(request, 'knowledge_server/export.json', {'json': exported_json}, content_type="application/json")
            if response_format == 'XML':
                exported_xml = "<Export ExportDateTime=\"" + str(datetime.now()) + "\">" + instance.serialize(export_format='XML', exported_instances = []) + "</Export>"
                xmldoc = minidom.parseString(exported_xml)
                exported_pretty_xml = xmldoc.toprettyxml(indent="    ")
                return render(request, 'knowledge_server/export.xml', {'xml': exported_pretty_xml}, content_type="application/xhtml+xml")
        else:
            raise Exception("The url '" + uri_instance + "' does not match the UKCL format")
    except Exception as es:
        if response_format == 'JSON':
            exported_json = '{ "Export" : { "ExportDateTime" : "' + str(datetime.now()) + '", "Error" : "' + str(es) + '" } }'
            return render(request, 'knowledge_server/export.json', {'json': exported_json}, content_type="application/json")
        if response_format == 'XML':
            exported_xml = "<Export ExportDateTime=\"" + str(datetime.now()) + "\" Error=\"" + str(es) + "\"/>"
            xmldoc = minidom.parseString(exported_xml)
            exported_pretty_xml = xmldoc.toprettyxml(indent="    ")
            return render(request, 'knowledge_server/export.xml', {'xml': exported_pretty_xml}, content_type="application/xhtml+xml")
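
How uri_instance reaches api_catch_all depends on the URL configuration, which is not shown in these examples. A minimal sketch of a catch-all route, again assuming the view lives in knowledge_server/views.py (hypothetical wiring):

from django.conf.urls import url
from knowledge_server import views

urlpatterns = [
    # hypothetical catch-all: anything not matched earlier is treated as a possible UKCL,
    # e.g. "knowledge_server/Attribute/1", optionally followed by "/xml" or "/json"
    url(r'^(?P<uri_instance>.*)$', views.api_catch_all, name='api_catch_all'),
]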
Example #3
def api_dataset(request, DataSet_UKCL, response_format):
    '''
        #36
        It returns the data in the dataset whose UKCL is passed in the parameter
        
        parameter:
        * DataSet_UKCL: UKCL of the DataSet 
        
        Implementation:
        # it creates the ModelMetadata class, 
        # fetches from the DB the one with pk = DataSet.root_instance_id
        # it runs to_xml of the ModelMetadata using DataSet.dataset_structure.root_node
        CAN BE CACHED
    '''
    response_format = response_format.upper()
    ar = ApiResponse()
    DataSet_UKCL_decoded = urllib.parse.unquote(DataSet_UKCL).replace("%2F","/")
    
    url = KsUrl(DataSet_UKCL_decoded)
    # If it is not a DataSet we try to find the dataset it is in
    url.search_on_db()
    dataset = None
    if url.actual_instance:
        if isinstance(url.actual_instance, DataSet):
            dataset = url.actual_instance
        else:
            dataset = url.actual_instance.dataset_I_belong_to
    if (not url.actual_instance) or (not dataset):
        ar.message = "Either the URL requested is not on this database or it is not part of a released dataset."
        ar.status = ApiResponse.failure
        if response_format == 'JSON':
            return render(request, 'knowledge_server/export.json', {'json': ar.json()}, content_type="application/json")
        if response_format == 'XML':
            return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()}, content_type="application/xhtml+xml")

    actual_instance_json = ""
    # this dataset is not a view; take its single root instance
    actual_instance = dataset.root

    if response_format == 'JSON':
        ar.content = { "DataSet": dataset.export(export_format = 'DICT') }
        ar.status = ApiResponse.success
        return render(request, 'knowledge_server/export.json', {'json': ar.json()}, content_type="application/json")
    if response_format == 'XML':
        ar.status = ApiResponse.success
        ar.content = dataset.export(export_format = response_format)
        return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()}, content_type="application/xhtml+xml")
    if response_format == 'HTML' or response_format == 'BROWSE':
        actual_instance_json = '{' + actual_instance.serialize(dataset.dataset_structure.root_node, export_format='json', exported_instances = []) + '}'
        this_ks = KnowledgeServer.this_knowledge_server()
        cont = RequestContext(request, {'dataset': dataset, 'actual_instance': actual_instance, 'actual_instance_json': actual_instance_json, 'sn': dataset.dataset_structure.root_node, 'DataSet_UKCL': DataSet_UKCL, 'this_ks':this_ks, 'this_ks_encoded_url':this_ks.url(True)})
        return render_to_response('knowledge_server/browse_dataset.html', context_instance=cont)
Example #4
def forwards_func(apps, schema_editor):
    org_ks={
      "Organization": {"name": "A test Organization", "website": "http://new_org.example.com", "description": ""}, 
      "KnowledgeServer": {"name": "A test OKS.", "netloc": "test.thekoa.org", "description": "It has some datasets and structures", "html_home": "", "html_disclaimer": ""}
     }
    KnowledgeServer.create_this_ks(org_ks)
    this_ks_d = KnowledgeServer.this_knowledge_server('default')
    
    dssContinentState = DataSetStructure()
    dssContinentState.name = "Test Continent-SubContinent-State"
    dssContinentState.SetNotNullFields()
    dssContinentState.save()

    mmContinent=dssContinentState.create_model_metadata(name="Continent",module="test1",name_field="name",description_field="")
    mmSubContinent=dssContinentState.create_model_metadata(name="SubContinent",module="test1",name_field="name",description_field="")
    mmState=dssContinentState.create_model_metadata(name="State",module="test1",name_field="name",description_field="")
    
    # It creates ModelMetadata and a DataSet for each of them; having the DataSetStructure makes it
    # possible to release and materialize the datasets with dangling references that will be
    # resolved once the dss is released and materialized. 
    KnowledgeServer.register_models([mmContinent, mmSubContinent, mmState])
     
    # it creates the root node from the ModelMetadata provided
    dssContinentState.root_model_metadata(mmContinent)
    # child nodes for two attributes/fields
    dssContinentState.root_node.children_for(["subcontinent_set", "state_set"], this_ks_d.netloc)
    dssContinentState.save()
    
    dss_dss = DataSetStructure.get_from_name(DataSetStructure.dataset_structure_DSN)
    
    ds = DataSet(description='DataSet for data set structure "Continent-SubContinent-State"', knowledge_server=this_ks_d, dataset_structure=dss_dss, root=dssContinentState, version_major=0, version_minor=1, version_patch=0, version_description="")
    ds.save()
    ds.set_released()
        
    europe = Continent(name="Europe")
    europe.save()
    asia = Continent(name="Asia")
    asia.save()
    south_europe = SubContinent(name="South Europe", continent=europe)
    south_europe.save()
    central_europe = SubContinent(name="Central Europe", continent=europe)
    central_europe.save()
    italy = State(name="Italy", sub_continent=south_europe, continent=europe)
    italy.save()
    spain = State(name="Spain", sub_continent=south_europe, continent=europe)
    spain.save()
    germany = State(name="Germany", sub_continent=central_europe, continent=europe)
    germany.save()
    
    ds = DataSet(knowledge_server=this_ks_d, dataset_structure=dssContinentState, root=europe,
                 description="Europe", version_major=0, version_minor=1, version_patch=0, version_description="")
    ds.save()
    ds.set_released()
    ds = DataSet(knowledge_server=this_ks_d, dataset_structure=dssContinentState, root=asia,
                 description="Asia", version_major=0, version_minor=1, version_patch=0, version_description="")
    ds.save()
    ds.set_released()
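
A data migration like this one is normally wired up through migrations.RunPython; a minimal sketch, with a hypothetical app label and dependency:

from django.db import migrations

class Migration(migrations.Migration):

    dependencies = [
        ('test1', '0001_initial'),  # hypothetical previous migration
    ]

    operations = [
        # run forwards_func on migrate; no real reverse step is provided here
        migrations.RunPython(forwards_func, migrations.RunPython.noop),
    ]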
Example #5
def forwards_func(apps, schema_editor):
    this_ks_d = KnowledgeServer.this_knowledge_server('default')

    dss_workflow = DataSetStructure(name='Workflow')
    dss_workflow.description = 'Workflow-Method-Attribute-PermissionStatement'
    dss_workflow.SetNotNullFields()
    dss_workflow.save()

    mm_workflow = Workflow().get_model_metadata(db_alias='default')
    mm_workflow_method = dss_workflow.create_model_metadata(name="WorkflowMethod",module="ap",name_field="name",description_field="description")
    mm_attribute = dss_workflow.create_model_metadata(name="Attribute",module="ap",name_field="",description_field="")
    mm_widget = dss_workflow.create_model_metadata(name="Widget",module="ap",name_field="",description_field="")
    mm_attribute_in_a_method = dss_workflow.create_model_metadata(name="AttributeInAMethod",module="ap",name_field="",description_field="")
    mm_permission_statement = dss_workflow.create_model_metadata(name="PermissionStatement",module="ap",name_field="",description_field="")
    mm_permission_holder = dss_workflow.create_model_metadata(name="PermissionHolder",module="ap",name_field="",description_field="")

    # It creates a DataSet for each of them; having the DataSetStructure makes it
    # possible to release and materialize the datasets with dangling references that will be
    # resolved once the dss is released and materialized. 
    KnowledgeServer.register_models([mm_workflow_method, mm_attribute_in_a_method, mm_permission_statement, mm_attribute, mm_widget, mm_permission_holder])

    # it creates the root node from the ModelMetadata provided
    dss_workflow.root_model_metadata(mm_workflow)
    # child node for the "methods" attribute/field
    dss_workflow.root_node.children_nodes_for(["methods"], this_ks_d.netloc)
    dss_workflow.root_node.children_external_references_nodes_for( [ "type" ], this_ks_d.netloc )

    method_node = dss_workflow.root_node.child_nodes.all()[0]
    method_node.children_external_references_nodes_for(["initial_statuses", "final_status"], this_ks_d.netloc)
    method_node.children_nodes_for(["attributeinamethod_set", "permission"], this_ks_d.netloc)
    attribute_in_a_method_node = [cn for cn in method_node.child_nodes.all() if cn.attribute == "attributeinamethod_set"][0]
    attribute_in_a_method_node.children_external_references_nodes_for( [ "attribute", "custom_widget" ], this_ks_d.netloc )

    permission_node = [cn for cn in method_node.child_nodes.all() if cn.attribute == "permission"][0]
    permission_node.children_external_references_nodes_for( [ "permission_holder" ], this_ks_d.netloc )

    dss_workflow.save()
    
    dss_dss = DataSetStructure.get_from_name(DataSetStructure.dataset_structure_DSN)
    ds = DataSet(description='DataSet for data set structure "Workflow-Method-Attribute-PermissionStatement"', knowledge_server=this_ks_d, dataset_structure=dss_dss, root=dss_workflow, version_major=0, version_minor=1, version_patch=0, version_description="")
    ds.save()
    ds.set_released()
        

    # Application now is on its own dataset structure so we don't create it and just use the shallow one
    mm_application = dss_workflow.create_model_metadata(name='Application',module='ap',name_field="name",description_field="description")
    KnowledgeServer.register_models([mm_application])
Example #6
def api_ks_info(request, response_format):
    '''
        #80

        parameter:
        * response_format { 'XML' | 'JSON' }
        
        Implementation:
          it fetches this KS from the DB, takes its Organization and exports
          it with the structure "Organization-KS"
        CAN BE CACHED
    '''
    response_format = response_format.upper()
    this_ks = KnowledgeServer.this_knowledge_server()    
    dss = DataSetStructure.objects.get(name = DataSetStructure.organization_DSN)
    dataset = DataSet.objects.get(dataset_structure=dss, root_instance_id=this_ks.organization.id)
    return api_dataset(request, dataset.UKCL, response_format)
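
A hedged illustration of exercising this endpoint from a Django test; the '/ks_info/JSON/' path is an assumption, as the real route is defined in the project's urls.py:

from django.test import Client

client = Client()
# api_ks_info delegates to api_dataset for the Organization-KS dataset of this knowledge server
response = client.get('/ks_info/JSON/')
assert response['Content-Type'].startswith('application/json')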
Example #7
def forwards_func(apps, schema_editor):
    this_ks_d = KnowledgeServer.this_knowledge_server('default')

    dss_workflow = DataSetStructure(name='Workflow')
    dss_workflow.description = 'Workflow-Method-Attribute-PermissionStatement'
    dss_workflow.SetNotNullFields()
    dss_workflow.save()

    mm_workflow = Workflow().get_model_metadata(db_alias='default')
    models_for_dss_workflow = [
        {
            "name": "Attribute",
            "module": "ap",
            "name_field": "",
            "description_field": ""
        },
        {
            "name": "Widget",
            "module": "ap",
            "name_field": "",
            "description_field": ""
        },
        {
            "name": "AttributeInAMethod",
            "module": "ap",
            "name_field": "",
            "description_field": ""
        },
        {
            "name": "PermissionStatement",
            "module": "ap",
            "name_field": "",
            "description_field": ""
        },
        {
            "name": "PermissionHolder",
            "module": "ap",
            "name_field": "",
            "description_field": ""
        },
    ]
    mm_attribute, mm_widget, mm_attribute_in_a_method, mm_permission_statement, mm_permission_holder = dss_workflow.create_many_model_metadata(
        models_for_dss_workflow)

    mm_workflow_method = dss_workflow.create_model_metadata(
        name="WorkflowMethod", module="ap")
    # TODO: check which DSS these belong to
    mm_attribute_type = dss_workflow.create_model_metadata(
        name="AttributeType", module="ap")
    mm_attribute_group = dss_workflow.create_model_metadata(
        name="AttributeGroup", module="ap")
    # TODO: WorkflowsMethods is a many-to-many "through" model; does it need to be handled?
    # TODO: still to be placed (check which DSS; see the diagram on paper):
    #     WorkflowTransition, Application, ModelMetadataSearch,
    #     ApplicationStructureNodeSearch, AttributeInASearch,
    #     KSUser, KSRole, KSGroup

    # It creates a DataSet for each of them; having the DataSetStructure makes it
    # possible to release and materialize the datasets with dangling references that will be
    # resolved once the dss is released and materialized.
    KnowledgeServer.register_models([
        mm_workflow_method, mm_attribute_in_a_method, mm_permission_statement,
        mm_attribute, mm_attribute_type, mm_attribute_group, mm_widget,
        mm_permission_holder
    ])

    # it creates the root node from the ModelMetadata provided
    dss_workflow.root_model_metadata(mm_workflow)
    # child node for the "methods" attribute/field
    dss_workflow.root_node.children_nodes_for(["methods"], this_ks_d.netloc)
    dss_workflow.root_node.children_external_references_nodes_for(
        ["type"], this_ks_d.netloc)

    method_node = dss_workflow.root_node.child_nodes.all()[0]
    method_node.children_external_references_nodes_for(
        ["initial_statuses", "final_status"], this_ks_d.netloc)
    method_node.children_nodes_for(["attributeinamethod_set", "permission"],
                                   this_ks_d.netloc)
    attribute_in_a_method_node = [
        cn for cn in method_node.child_nodes.all()
        if cn.attribute == "attributeinamethod_set"
    ][0]
    attribute_in_a_method_node.children_external_references_nodes_for(
        ["attribute", "custom_widget"], this_ks_d.netloc)

    permission_node = [
        cn for cn in method_node.child_nodes.all()
        if cn.attribute == "permission"
    ][0]
    permission_node.children_external_references_nodes_for(
        ["permission_holder"], this_ks_d.netloc)

    dss_workflow.save()

    dss_dss = DataSetStructure.get_from_name(
        DataSetStructure.dataset_structure_DSN)
    ds = DataSet(
        description='DataSet for data set structure "Workflow-Method-Attribute-PermissionStatement"',
        knowledge_server=this_ks_d,
        dataset_structure=dss_dss,
        root=dss_workflow,
        version_major=0,
        version_minor=1,
        version_patch=0,
        version_description="")
    ds.save()
    ds.set_released()

    # Application now is on its own dataset structure so we don't create it and just use the shallow one
    mm_application = dss_workflow.create_model_metadata(
        name='Application',
        module='ap',
        name_field="name",
        description_field="description")
    KnowledgeServer.register_models([mm_application])
Example #8
def api_dataset_info(request):
    '''
        #52 
        
        Parameters:
        * response_format { 'XML' | 'JSON' | 'HTML' = 'BROWSE' }
        * DataSet_UKCL: UKCL of the DataSet 
        
        Implementation:
        it fetches the DataSet, then the list of all the versions sharing the same first_version;
        it returns DataSet.export(response_format) and, for each one in the above list:
            the UKCL of the DataSet
            the version status {working | released | obsolete}
            the version number (e.g. 0.1.0)
            the version date
            the version description
            other version metadata
        CAN BE CACHED
    '''
    DataSet_UKCL = request.GET['UKCL']
    ar = ApiResponse(request=request)
    DataSet_UKCL_unquoted = urllib.parse.unquote(DataSet_UKCL).replace(
        "%2F", "/")
    dataset = DataSet.retrieve_locally(DataSet_UKCL_unquoted)
    all_versions = DataSet.objects.filter(first_version=dataset.first_version)
    all_versions_serialized = ""
    all_versions_list = []
    if ar.response_format != 'HTML' and ar.response_format != 'BROWSE':
        for v in all_versions:
            if ar.response_format == 'JSON':
                # note: I am using DICT as the export_format so that I can merge the dicts (.update) and then convert to json
                all_versions_list.append(
                    v.export(export_format='DICT',
                             force_external_reference=True))
            else:
                all_versions_serialized += v.export(
                    export_format=ar.response_format,
                    force_external_reference=True)
    if ar.response_format == 'XML':
        ar.status = ApiResponse.success
        ar.content = "<DataSet>" + dataset.export(
            export_format=ar.response_format, force_external_reference=True
        ) + "</DataSet><Versions>" + all_versions_serialized + "</Versions>"
        return render(request,
                      'knowledge_server/export.xml', {'xml': ar.xml()},
                      content_type="application/xhtml+xml")
    if ar.response_format == 'JSON':
        ar.content = {
            "DataSet":
            dataset.export(export_format="DICT",
                           force_external_reference=True),
            "Versions":
            all_versions_list
        }
        ar.status = ApiResponse.success
        return HttpResponse(ar.json(), content_type="application/json")
    if ar.response_format == 'HTML' or ar.response_format == 'BROWSE':
        if dataset.dataset_structure.is_a_view:
            instances = dataset.get_instances()
        else:
            instances = []
            instances.append(dataset.root)
        all_versions_with_instances = []
        for v in all_versions:
            if v.UKCL != dataset.UKCL:
                version_with_instance = {}
                version_with_instance['dataset'] = v
                version_with_instance['root'] = []
                # views have no version by themselves; only their components have and they can be different
                # so if we are here we are not in a view hence there is just one instance:
                #         I get root and not .get_instances()
                version_with_instance['root'].append(v.root)
                all_versions_with_instances.append(version_with_instance)
        this_ks = KnowledgeServer.this_knowledge_server()
        cont = RequestContext(
            request, {
                'DataSet_UKCL': DataSet_UKCL,
                'dataset': dataset,
                'all_versions_with_instances': all_versions_with_instances,
                'ks': dataset.knowledge_server,
                'instances': instances,
                'this_ks': this_ks,
                'this_ks_encoded_url': this_ks.url(True)
            })
        return render_to_response('knowledge_server/api_dataset_info.html',
                                  context_instance=cont)
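
A hedged illustration of how a client could call this querystring-based variant, mirroring the urlopen style used elsewhere in these examples; the host and the '/api/dataset_info' path are assumptions, and the format parameter follows the "?format=JSON" convention seen in the other views:

import urllib.parse
from urllib.request import urlopen
from json import loads as json_loads

ukcl = "http://root.beta.thekoa.org/knowledge_server/DataSet/1"  # illustrative UKCL
qs = urllib.parse.urlencode({'UKCL': ukcl, 'format': 'JSON'})
response = urlopen("http://test.thekoa.org/api/dataset_info?" + qs)  # hypothetical URL
info = json_loads(response.read().decode("utf-8"))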
Example #9
def api_catch_all(request, uri_instance):
    '''
        parameters:
            url: http://root.beta.thekoa.org/knowledge_server/Attribute/1
            url: http://root.beta.thekoa.org/knowledge_server/Attribute/1/xml
            url: http://root.beta.thekoa.org/knowledge_server/Attribute/1/json
        
        Implementation:
            I do something only if it is a UKCL in my database; otherwise I return a "not found" message.
            If there is a trailing string for the response_format I use it; otherwise I apply the default, xml.
            The trailing string can be "/xml", "/xml/", "/json" or "/json/", where each character can
            be either upper or lower case.
        CAN BE CACHED
    '''
    # I search for a response_format suffix; a UKCL itself has no trailing slash
    response_format = 'XML'  #default
    if uri_instance[-1:] == "/":
        #I remove a trailing slash
        uri_instance = uri_instance[:-1]
    if uri_instance[-3:].lower() == "xml":
        uri_instance = uri_instance[:-4]
    if uri_instance[-4:].lower() == "json":
        response_format = 'JSON'
        uri_instance = uri_instance[:-5]

    try:
        split_path = uri_instance.split('/')
        if len(split_path) == 3:
            module_name = split_path[0]
            simple_entity_name = split_path[1]
            this_ks = KnowledgeServer.this_knowledge_server()
            actual_class = OrmWrapper.load_class(this_ks.netloc, module_name,
                                                 simple_entity_name)
            instance = actual_class.retrieve_locally(this_ks.url() + "/" +
                                                     uri_instance)
            if response_format == 'JSON':
                exported_json = '{ "Export" : { "ExportDateTime" : "' + str(
                    datetime.now()) + '", ' + instance.serialize(
                        export_format='JSON', exported_instances=[]) + ' } }'
                return render(request,
                              'knowledge_server/export.json',
                              {'json': exported_json},
                              content_type="application/json")
            if response_format == 'XML':
                exported_xml = "<Export ExportDateTime=\"" + str(
                    datetime.now()) + "\">" + instance.serialize(
                        export_format='XML',
                        exported_instances=[]) + "</Export>"
                xmldoc = minidom.parseString(exported_xml)
                exported_pretty_xml = xmldoc.toprettyxml(indent="    ")
                return render(request,
                              'knowledge_server/export.xml',
                              {'xml': exported_pretty_xml},
                              content_type="application/xhtml+xml")
        else:
            raise Exception("The url '" + uri_instance +
                            "' does not match the UKCL format")
    except Exception as es:
        if response_format == 'JSON':
            exported_json = '{ "Export" : { "ExportDateTime" : "' + str(
                datetime.now()) + '", "Error" : "' + str(es) + '" } }'
            return render(request,
                          'knowledge_server/export.json',
                          {'json': exported_json},
                          content_type="application/json")
        if response_format == 'XML':
            exported_xml = "<Export ExportDateTime=\"" + str(
                datetime.now()) + "\" Error=\"" + str(es) + "\"/>"
            xmldoc = minidom.parseString(exported_xml)
            exported_pretty_xml = xmldoc.toprettyxml(indent="    ")
            return render(request,
                          'knowledge_server/export.xml',
                          {'xml': exported_pretty_xml},
                          content_type="application/xhtml+xml")
Example #10
def api_dataset(request, UKCL=None):
    '''
        #36
        It returns the data in the dataset whose UKCL is passed in the parameter
        
        parameter:
        * DataSet_UKCL: UKCL of the DataSet 
        
        Implementation:
        # it creates the ModelMetadata class, 
        # fetches from the DB the one with pk = DataSet.root_instance_id
        # it runs to_xml of the ModelMetadata using DataSet.dataset_structure.root_node
        CAN BE CACHED
    '''
    if UKCL:
        # invoked from ks_info
        DataSet_UKCL = UKCL
    else:
        DataSet_UKCL = request.GET['UKCL']
    ar = ApiResponse(request=request)

    url = KsUrl(DataSet_UKCL)
    # If it is not a DataSet we try to find the dataset it is in
    url.search_on_db()
    dataset = None
    if url.actual_instance:
        if isinstance(url.actual_instance, DataSet):
            dataset = url.actual_instance
        else:
            dataset = url.actual_instance.dataset_I_belong_to
    if (not url.actual_instance) or (not dataset):
        ar.message = "Either the URL requested is not on this database or it is not part of a released dataset."
        ar.status = ApiResponse.failure
        if ar.response_format == 'JSON':
            return render(request,
                          'knowledge_server/export.json', {'json': ar.json()},
                          content_type="application/json")
        if ar.response_format == 'XML':
            return render(request,
                          'knowledge_server/export.xml', {'xml': ar.xml()},
                          content_type="application/xhtml+xml")

    actual_instance_json = ""
    # this dataset is not a view; take its single root instance
    actual_instance = dataset.root

    if ar.response_format == 'JSON':
        ar.content = {"DataSet": dataset.export(export_format='DICT')}
        ar.status = ApiResponse.success
        return render(request,
                      'knowledge_server/export.json', {'json': ar.json()},
                      content_type="application/json")
    if ar.response_format == 'XML':
        ar.status = ApiResponse.success
        ar.content = dataset.export(export_format=ar.response_format)
        return render(request,
                      'knowledge_server/export.xml', {'xml': ar.xml()},
                      content_type="application/xhtml+xml")
    if ar.response_format == 'HTML' or ar.response_format == 'BROWSE':
        actual_instance_json = '{' + actual_instance.serialize(
            dataset.dataset_structure.root_node,
            export_format='json',
            exported_instances=[]) + '}'
        this_ks = KnowledgeServer.this_knowledge_server()
        cont = RequestContext(
            request, {
                'dataset': dataset,
                'actual_instance': actual_instance,
                'actual_instance_json': actual_instance_json,
                'sn': dataset.dataset_structure.root_node,
                'DataSet_UKCL': DataSet_UKCL,
                'this_ks': this_ks,
                'this_ks_encoded_url': this_ks.url(True)
            })
        return render_to_response('knowledge_server/browse_dataset.html',
                                  context_instance=cont)
Example #11
def datasets_of_type(request, ks_url, UKCL, response_format):
    '''
    returns the list of datasets of a specific type/structure
    '''
    this_ks = KnowledgeServer.this_knowledge_server()
    response_format = response_format.upper()
    ks_url = urllib.parse.unquote(ks_url)
    tmp_ks_url = KsUrl(ks_url)

    q_UKCL = UKCL
    UKCL = urllib.parse.unquote(UKCL)
    if this_ks.scheme != tmp_ks_url.scheme or this_ks.netloc != tmp_ks_url.netloc:
        # info on the remote ks
        ar_ks_info = ApiResponse()
        ar_ks_info.invoke_oks_api(ks_url, 'api_ks_info' + "?format=JSON")
        organization = ar_ks_info.content['DataSet']['ActualInstance'][
            'Organization']
        for ks in organization['knowledgeserver_set']:
            if ks['this_ks']:
                external_ks_json = ks
        external_ks = KnowledgeServer()
        external_ks.name = external_ks_json['name']
        external_ks.scheme = external_ks_json['scheme']
        external_ks.netloc = external_ks_json['netloc']
        external_ks.description = external_ks_json['description']
        browsing_this = False
    else:
        external_ks = this_ks
        organization = this_ks.organization
        browsing_this = True
    # info on the DataSetStructure
    # TODO: the following call relies on api_catch_all; use dataset_info instead
    response = urlopen(UKCL + "/json")
    es_info_json = json_loads(response.read().decode("utf-8"))

    if response_format == 'XML':
        local_url = reverse('api_datasets') + ("?UKCL=%s&format=%s" %
                                               (q_UKCL, response_format))
    if response_format == 'JSON' or response_format == 'BROWSE':
        local_url = reverse('api_datasets') + ("?UKCL=%s&format=JSON" % q_UKCL)
    response = urlopen(ks_url + local_url)
    datasets = response.read().decode("utf-8")
    if response_format == 'XML':
        return render(request,
                      'knowledge_server/export.xml', {'xml': datasets},
                      content_type="application/xhtml+xml")
    if response_format == 'JSON':
        return render(request,
                      'knowledge_server/export.json', {'json': datasets},
                      content_type="application/json")
    if response_format == 'BROWSE':
        # parse
        decoded = json_loads(datasets)
        # I prepare a list of first-version UKCLs so that I can check which ones I have subscribed to
        first_version_UKCLs = []
        for ei in decoded['content']['DataSets']:
            if 'first_version' in ei:
                first_version_UKCLs.append(ei['first_version']['UKCL'])
            else:
                first_version_UKCLs.append(ei['UKCL'])
        subscribed = SubscriptionToOther.objects.filter(
            first_version_UKCL__in=first_version_UKCLs)
        subscribed_first_version_UKCLs = []
        for s in subscribed:
            subscribed_first_version_UKCLs.append(s.first_version_UKCL)
        datasets = []
        for ei in decoded['content']['DataSets']:
            dataset = {}
            if 'ActualInstance' in ei.keys():
                actual_instance_class = list(ei['ActualInstance'].keys())[0]
                dataset['actual_instance_name'] = ei['ActualInstance'][
                    actual_instance_class]['name']
            else:  #is a view
                dataset['actual_instance_name'] = ei['description']
            dataset['encodedUKCL'] = urllib.parse.urlencode({'':
                                                             ei['UKCL']})[1:]
            dataset['UKCL'] = urllib.parse.quote(ei['UKCL']).replace(
                "/", "%2F")
            subscribed_UKCL = ei['first_version'][
                'UKCL'] if 'first_version' in ei else ei['UKCL']
            dataset[
                'subscribed'] = subscribed_UKCL in subscribed_first_version_UKCLs
            datasets.append(dataset)
        cont = RequestContext(
            request, {
                'browsing_this': browsing_this,
                'datasets': datasets,
                'organization': organization,
                'this_ks': this_ks,
                'this_ks_encoded_url': this_ks.url(True),
                'external_ks': external_ks,
                'es_info_json': es_info_json
            })
        return render_to_response('knowledge_server/datasets_of_type.html',
                                  context_instance=cont)
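
As an aside, the encodedUKCL value above relies on urlencode's "key=value" output: urlencode({'': value}) percent-encodes the value after a leading '=', which the [1:] slice strips. A quick worked example with an illustrative UKCL:

import urllib.parse

ukcl = "http://root.beta.thekoa.org/knowledge_server/DataSet/1"  # illustrative value
print(urllib.parse.urlencode({'': ukcl})[1:])
# http%3A%2F%2Froot.beta.thekoa.org%2Fknowledge_server%2FDataSet%2F1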
Example #12
def api_dataset_info(request, DataSet_UKCL, response_format):
    '''
        #52 
        
        Parameters:
        * response_format { 'XML' | 'JSON' | 'HTML' = 'BROWSE' }
        * DataSet_UKCL: UKCL of the DataSet 
        
        Implementation:
        it fetches the DataSet, then the list of all the versions sharing the same first_version;
        it returns DataSet.export(response_format) and, for each one in the above list:
            the UKCL of the DataSet
            the version status {working | released | obsolete}
            the version number (e.g. 0.1.0)
            the version date
            the version description
            other version metadata
        CAN BE CACHED
    '''
    response_format = response_format.upper()
    DataSet_UKCL_unquoted = urllib.parse.unquote(DataSet_UKCL).replace("%2F","/")
    dataset = DataSet.retrieve_locally(DataSet_UKCL_unquoted)
    all_versions = DataSet.objects.filter(first_version = dataset.first_version)
    all_versions_serialized = ""
    all_versions_list = []
    if response_format != 'HTML' and response_format != 'BROWSE':
        for v in all_versions:
            if response_format == 'JSON':
                # note: I am using DICT as the export_format so that I can merge the dicts (.update) and then convert to json
                all_versions_list.append(v.export(export_format = 'DICT', force_external_reference=True))
            else:
                all_versions_serialized += v.export(export_format = response_format, force_external_reference=True)
    if response_format == 'XML':
        ar = ApiResponse()
        ar.status = ApiResponse.success
        ar.content = "<DataSet>" + dataset.export(export_format = response_format, force_external_reference=True) + "</DataSet><Versions>" + all_versions_serialized + "</Versions>"
        return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()}, content_type="application/xhtml+xml")
    if response_format == 'JSON':
        ar = ApiResponse()
        ar.content = { "DataSet": dataset.export(export_format = "DICT", force_external_reference=True), "Versions": all_versions_list }
        ar.status = ApiResponse.success
        return HttpResponse(ar.json(), content_type="application/json")
    if response_format == 'HTML' or response_format == 'BROWSE':
        if dataset.dataset_structure.is_a_view:
            instances = dataset.get_instances()
        else:
            instances = []
            instances.append(dataset.root)
        all_versions_with_instances = []
        for v in all_versions:
            if v.UKCL != dataset.UKCL:
                version_with_instance = {}
                version_with_instance['dataset'] = v
                version_with_instance['root'] = []
                # views have no version by themselves; only their components have and they can be different
                # so if we are here we are not in a view hence there is just one instance: 
                #         I get root and not .get_instances()
                version_with_instance['root'].append(v.root)
                all_versions_with_instances.append(version_with_instance)
        this_ks = KnowledgeServer.this_knowledge_server()
        cont = RequestContext(request, {'DataSet_UKCL': DataSet_UKCL, 'dataset': dataset, 'all_versions_with_instances': all_versions_with_instances, 'ks': dataset.knowledge_server, 'instances': instances, 'this_ks':this_ks, 'this_ks_encoded_url':this_ks.url(True) })
        return render_to_response('knowledge_server/api_dataset_info.html', context_instance=cont)