def api_dataset_structure_code(request):
    '''
    This API is needed only by another OKS; it is not meant to be public.
    Its goal is to provide another OKS with the information needed to generate
    and migrate the models within a structure.
    TODO: Models that are external references are not included.
    The information is provided in the form of the code of the classes, in a
    dictionary that groups them by APP/MODULE.
    CAN BE CACHED
    '''
    DataSetStructure_UKCL = request.GET['UKCL']
    dss = DataSetStructure.retrieve_locally(
        urllib.parse.unquote(DataSetStructure_UKCL).replace("%2F", "/"))
    try:
        classes_code = dss.classes_code()
        return render(
            request, 'knowledge_server/export.json',
            {'json': ApiResponse(ApiResponse.success, "", classes_code).json()},
            content_type="application/json")
    except Exception as ex:
        return render(
            request, 'knowledge_server/export.json',
            {'json': ApiResponse(ApiResponse.failure, str(ex)).json()},
            content_type="application/json")
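# A minimal client-side sketch of how another OKS might consume
# api_dataset_structure_code. The host, endpoint path and every envelope key
# except 'content' are assumptions for illustration; only the grouping of
# class code by APP/MODULE comes from the view above.
def example_fetch_structure_code(oks_home, dataset_structure_ukcl):
    import json
    from urllib.parse import quote
    from urllib.request import urlopen

    # hypothetical endpoint path; the real mapping lives in the project's urls.py
    url = (oks_home + "/oks/api/dataset_structure_code/?UKCL="
           + quote(dataset_structure_ukcl, safe=""))
    envelope = json.loads(urlopen(url).read().decode("utf-8"))
    # 'content' is expected to map each app/module to the generated class code
    for app_module in envelope.get("content", {}):
        print("class code received for:", app_module)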
def api_notify(request):
    '''
    #35
    It receives a notification; the verb is POST.
    Parameters:
        TODO: first_version_UKCL->first_version_UKCL
        URL_dataset --> dataset_UKCL
        URL_structure --> structure_UKCL
        first_version_UKCL: the UKCL of the first version of the DataSet for which the event has happened
        event_type: the URInstance of the EventType
    NEVER CACHED
    '''
    first_version_UKCL = request.POST.get("first_version_UKCL", "")
    URL_dataset = request.POST.get("URL_dataset", "")
    URL_structure = request.POST.get("URL_structure", "")
    event_type = request.POST.get("type", "")
    # Did I subscribe to this?
    sto = SubscriptionToOther.objects.filter(first_version_UKCL=first_version_UKCL)
    ar = ApiResponse()
    if len(sto) > 0:
        nr = NotificationReceived()
        nr.URL_dataset = URL_dataset
        nr.URL_structure = URL_structure
        nr.save()
        ar.status = ApiResponse.success
    else:
        ar.status = ApiResponse.failure
        ar.message = "Not subscribed to this"
    return render(request, 'knowledge_server/export.json', {'json': ar.json()},
                  content_type="application/json")
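# A minimal sketch of the publisher side of the notification protocol: the OKS
# on which an event happened POSTs to the remote_url that each subscriber
# registered via api_subscribe. The parameter names mirror what api_notify
# reads from request.POST; the URLs and the event type value are illustrative.
def example_send_notification(subscriber_remote_url, first_version_ukcl,
                              dataset_ukcl, structure_ukcl):
    from urllib.parse import urlencode
    from urllib.request import urlopen

    data = urlencode({
        "first_version_UKCL": first_version_ukcl,
        "URL_dataset": dataset_ukcl,        # read as URL_dataset by api_notify
        "URL_structure": structure_ukcl,    # read as URL_structure by api_notify
        "type": "new_version",              # hypothetical EventType identifier
    }).encode("utf-8")
    # passing data makes urlopen issue a POST, which is what api_notify expects
    return urlopen(subscriber_remote_url, data=data).read().decode("utf-8")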
def api_datasets(request, DataSetStructure_UKCL=None, response_format=None):
    '''
    http://redmine.davide.galletti.name/issues/64
    All the released datasets of a given structure/type.
    Parameters:
    * response_format { 'XML' | 'JSON' }
    * DataSetStructure_UKCL: UKCL of the DataSetStructure, encoded
    Implementation:
    # it fetches the structure from the DB and looks for all the datasets
    # with that structure; if it is not a view, only those that are released
    CAN BE CACHED
    '''
    if not DataSetStructure_UKCL:
        DataSetStructure_UKCL = request.GET['UKCL']
    if response_format:
        ar = ApiResponse(format=response_format)
    else:
        # if not specified I get it from the request object
        ar = ApiResponse(request=request)
    dss = DataSetStructure.retrieve_locally(
        urllib.parse.unquote(DataSetStructure_UKCL).replace("%2F", "/"))
    # Now I need to get all the released DataSet of the DataSetStructure passed as a parameter
    if dss.is_a_view:
        # version_released is not relevant for views
        released_dataset = DataSet.objects.filter(dataset_structure=dss)
    else:
        released_dataset = DataSet.objects.filter(dataset_structure=dss, version_released=True)
    serialized = ""
    dataset_list = []
    for dataset in released_dataset:
        if ar.response_format == 'JSON':
            dataset_list.append(
                dataset.export(export_format="DICT", force_external_reference=True))
        else:
            serialized += dataset.export(export_format=ar.response_format,
                                         force_external_reference=True)
    if ar.response_format == 'XML':
        ar.status = ApiResponse.success
        ar.content = "<DataSets>" + serialized + "</DataSets>"
        return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()},
                      content_type="application/xhtml+xml")
    if ar.response_format == 'JSON':
        ar.content = {"DataSets": dataset_list}
        ar.status = ApiResponse.success
        return render(request, 'knowledge_server/export.json', {'json': ar.json()},
                      content_type="application/json")
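# A minimal consumer-side sketch of api_datasets, assuming it is exposed with a
# query-string style similar to the other endpoints in this module (UKCL plus a
# format parameter). The host and path are placeholders; the 'content'/'DataSets'
# envelope keys follow the JSON shape consumed in datasets_of_type further below.
def example_list_released_datasets(oks_home, dataset_structure_ukcl):
    import json
    from urllib.parse import quote
    from urllib.request import urlopen

    # hypothetical endpoint path; the real mapping lives in the project's urls.py
    url = (oks_home + "/oks/api/datasets/?UKCL="
           + quote(dataset_structure_ukcl, safe="") + "&format=JSON")
    envelope = json.loads(urlopen(url).read().decode("utf-8"))
    for ds in envelope["content"]["DataSets"]:
        # each entry is a shallow export of a released DataSet
        print(ds.get("UKCL"))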
def json(request):
    '''
    micro test for json
    '''
    try:
        ar = ApiResponse()
        ar.content = {"DataSet": "Versions"}
        ar.status = ApiResponse.success
        return HttpResponse(ar.json(), content_type="application/json")
    except Exception as ex:
        logger.error("views.debug: " + str(ex))
        return HttpResponse(str(ex))
def api_dataset_view(request, DataSet_UKCL, root_id, response_format):
    '''
    It returns the data of the instance with pk=root_id in the dataset (which is a view).
    If we are browsing a view there is not just one single root that we can explore,
    but a list of instances that match the criteria; root_id tells us which one to browse.
    CAN BE CACHED
    '''
    response_format = response_format.upper()
    DataSet_UKCL_decoded = urllib.parse.unquote(DataSet_UKCL).replace("%2F", "/")
    dataset = DataSet.retrieve_locally(DataSet_UKCL_decoded)
    actual_instance = ""
    actual_instance_json = ""
    # this dataset is a view; I shall use root_id to retrieve the actual instance
    module_name = dataset.dataset_structure.root_node.model_metadata.module
    dataset_uri = KsUrl(DataSet_UKCL_decoded)
    actual_instance_class = OrmWrapper.load_class(
        dataset_uri.netloc, module_name,
        dataset.dataset_structure.root_node.model_metadata.name)
    actual_instance = actual_instance_class.objects.get(pk=root_id)
    if response_format == 'HTML' or response_format == 'BROWSE':
        actual_instance_json = '{' + actual_instance.serialize(
            dataset.dataset_structure.root_node, export_format='json',
            exported_instances=[]) + '}'
    if response_format == 'JSON':
        ar = ApiResponse()
        ar.content = {"DataSet": dataset.export(export_format='DICT')}
        ar.status = ApiResponse.success
        return render(request, 'knowledge_server/export.json', {'json': ar.json()},
                      content_type="application/json")
    if response_format == 'XML':
        ar = ApiResponse()
        ar.status = ApiResponse.success
        ar.content = dataset.export(export_format=response_format)
        return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()},
                      content_type="application/xhtml+xml")
    if response_format == 'HTML' or response_format == 'BROWSE':
        this_ks = KnowledgeServer.this_knowledge_server()
        cont = RequestContext(request, {
            'dataset': dataset,
            'actual_instance': actual_instance,
            'actual_instance_json': actual_instance_json,
            'sn': dataset.dataset_structure.root_node,
            'DataSet_UKCL': DataSet_UKCL,
            'this_ks': this_ks,
            'this_ks_encoded_url': this_ks.url(True)
        })
        return render_to_response('knowledge_server/browse_dataset.html',
                                  context_instance=cont)
def api_unsubscribe(request):
    '''
    #123
    Parameters:
        UKCL: the one I want to unsubscribe from
    NEVER CACHED
    '''
    ar = ApiResponse(request=request)
    UKCL = request.GET['UKCL']
    # remote_url has to be a parameter because without it I would have to guess the remote
    # server who has subscribed from the IP and/or the domain name in the request: unreliable
    remote_url = request.GET['remote_url']
    # I want to check that the request comes from the same domain as the remote_url;
    # I do this by checking that the IP address is the same
    x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
    if x_forwarded_for:
        ip = x_forwarded_for.split(',')[0]
    else:
        ip = request.META.get('REMOTE_ADDR')
    ip_notification = socket.gethostbyname(KsUrl(remote_url).netloc)
    if ip_notification != ip:
        return render(
            request, 'knowledge_server/export.json',
            {'json': ApiResponse(
                ApiResponse.failure,
                "Apparently this is a malicious attempt to unsubscribe.",
                "").json()},
            content_type="application/json")
    dataset = DataSet.objects.get(UKCL=UKCL)
    first_version_UKCL = dataset.first_version.UKCL
    if SubscriptionToThis.objects.filter(first_version_UKCL=first_version_UKCL,
                                         remote_url=remote_url).exists():
        stt = SubscriptionToThis.objects.get(first_version_UKCL=first_version_UKCL,
                                             remote_url=remote_url)
        stt.delete()
    else:
        logger.warning("Unsubscribe requested for a non-existing subscription: %s from %s"
                       % (UKCL, remote_url))
    return render(request, 'knowledge_server/export.json',
                  {'json': ApiResponse(ApiResponse.success,
                                       "Your request has been processed").json()},
                  content_type="application/json")
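# Both api_subscribe and api_unsubscribe guard against spoofed requests by
# resolving the host named in remote_url and comparing it with the caller's IP.
# The helper below only illustrates that check, extracted for clarity; it uses
# urllib.parse.urlparse as a stand-in for KsUrl and is not part of the views above.
def example_request_matches_remote_url(request, remote_url):
    import socket
    from urllib.parse import urlparse

    # honour a proxy header if present, otherwise fall back to the socket address
    x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
    if x_forwarded_for:
        ip = x_forwarded_for.split(',')[0]
    else:
        ip = request.META.get('REMOTE_ADDR')
    # resolve the host in remote_url (hostname strips any port) and require a match
    return socket.gethostbyname(urlparse(remote_url).hostname) == ip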
def ks_explorer(request):
    try:
        ks_url = urllib.parse.parse_qs('url=' + request.GET['ks_complete_url'])['url'][0]
    except:
        ks_url = request.POST['ks_complete_url']
    try:
        # use the KsUrl class to clean it up
        ks_url = KsUrl(ks_url).url
        this_ks = KnowledgeServer.this_knowledge_server()
        # info on the remote ks
        ar_ks_info = ApiResponse()
        ar_ks_info.invoke_oks_api(ks_url, 'api_ks_info') + "?format=JSON"
        if ar_ks_info.status == ApiResponse.success:
            organization = ar_ks_info.content['DataSet']['ActualInstance']['Organization']
            for ks in organization['knowledgeserver_set']:
                if ks['this_ks']:
                    explored_ks = ks
            # info about structures on the remote ks
            ar_ds_types = ApiResponse()
            ar_ds_types.invoke_oks_api(ks_url, 'api_dataset_types') + "?format=JSON"
            owned_structures = []
            other_structures = []
            for ei in ar_ds_types.content['DataSets']:
                entity = {}
                entity['actual_instance_name'] = ei['ActualInstance']['DataSetStructure']['name']
                entity['UKCL'] = urllib.parse.urlencode(
                    {'': ei['ActualInstance']['DataSetStructure']['UKCL']})[1:]
                entity['oks_name'] = ei['knowledge_server']['name']
                external_oks_url = KsUrl(ei['knowledge_server']['UKCL']).home()
                entity['oks_home'] = urllib.parse.urlencode({'': external_oks_url})[1:]
                if ei['knowledge_server']['UKCL'] == explored_ks['UKCL']:
                    owned_structures.append(entity)
                else:
                    other_structures.append(entity)
        else:
            return HttpResponse("Error invoking api_ks_info on " + ks_url + " - " +
                                ar_ks_info.message)
    except Exception as ex:
        return HttpResponse(str(ex))
    cont = RequestContext(request, {
        'owned_structures': owned_structures,
        'other_structures': other_structures,
        'this_ks': this_ks,
        'this_ks_encoded_url': this_ks.url(True),
        'organization': organization,
        'explored_ks': explored_ks,
        'ks_url': urllib.parse.urlencode({'': ks_url})[1:]
    })
    return render_to_response('knowledge_server/ks_explorer_entities.html',
                              context_instance=cont)
def api_dataset_view(request):
    '''
    It returns the data of the instance with pk=root_id in the dataset (which is a view).
    If we are browsing a view there is not just one single root that we can explore,
    but a list of instances that match the criteria; root_id tells us which one to browse.
    CAN BE CACHED
    '''
    DataSet_UKCL = request.GET['UKCL']
    root_id = request.GET['id']
    ar = ApiResponse(request=request)
    dataset = DataSet.retrieve_locally(DataSet_UKCL)
    actual_instance = ""
    actual_instance_json = ""
    # this dataset is a view; I shall use root_id to retrieve the actual instance
    module_name = dataset.dataset_structure.root_node.model_metadata.module
    dataset_uri = KsUrl(DataSet_UKCL)
    actual_instance_class = OrmWrapper.load_class(
        dataset_uri.netloc, module_name,
        dataset.dataset_structure.root_node.model_metadata.name)
    actual_instance = actual_instance_class.objects.get(pk=root_id)
    if ar.response_format == 'HTML' or ar.response_format == 'BROWSE':
        actual_instance_json = '{' + actual_instance.serialize(
            dataset.dataset_structure.root_node, export_format='json',
            exported_instances=[]) + '}'
    if ar.response_format == 'JSON':
        ar.content = {"DataSet": dataset.export(export_format='DICT')}
        ar.status = ApiResponse.success
        return render(request, 'knowledge_server/export.json', {'json': ar.json()},
                      content_type="application/json")
    if ar.response_format == 'XML':
        ar.status = ApiResponse.success
        ar.content = dataset.export(export_format=ar.response_format)
        return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()},
                      content_type="application/xhtml+xml")
    if ar.response_format == 'HTML' or ar.response_format == 'BROWSE':
        this_ks = KnowledgeServer.this_knowledge_server()
        cont = RequestContext(request, {
            'dataset': dataset,
            'actual_instance': actual_instance,
            'actual_instance_json': actual_instance_json,
            'sn': dataset.dataset_structure.root_node,
            'DataSet_UKCL': DataSet_UKCL,
            'this_ks': this_ks,
            'this_ks_encoded_url': this_ks.url(True)
        })
        return render_to_response('knowledge_server/browse_dataset.html',
                                  context_instance=cont)
def release_dataset(request, Dataset_UKCL):
    '''
    Releases the DataSet identified by Dataset_UKCL.
    '''
    try:
        Dataset_UKCL = urllib.parse.unquote(Dataset_UKCL)
        dataset = DataSet.objects.get(UKCL=Dataset_UKCL)
        dataset.set_released()
        return render(request, 'knowledge_server/export.json',
                      {'json': ApiResponse(ApiResponse.success,
                                           Dataset_UKCL + " successfully released.").json()},
                      content_type="application/json")
    except Exception as ex:
        return render(
            request, 'knowledge_server/export.json',
            {'json': ApiResponse(ApiResponse.failure, str(ex)).json()},
            content_type="application/json")
def this_ks_subscribes_to(request, UKCL):
    '''
    This ks is subscribing to a data set in another ks.
    First I store the subscription locally,
    then I invoke api_subscribe remotely;
    if it works I commit locally.
    '''
    UKCL = str(urllib.parse.unquote(UKCL).replace("%2F", "/"))
    other_ks_uri = KsUrl(UKCL).home()
    KnowledgeServer.get_remote_ks(other_ks_uri)
    try:
        with transaction.atomic():
            encoded_UKCL = urllib.parse.urlencode({'': UKCL})[1:]
            # invoke remote API to subscribe
            this_ks = KnowledgeServer.this_knowledge_server()
            url_to_invoke = urllib.parse.urlencode({'': this_ks.url() + reverse('api_notify')})[1:]
            ar = ApiResponse()
            ar.invoke_oks_api(other_ks_uri, 'api_subscribe') + (
                "?UKCL=%s&remote_url=%s" % (encoded_UKCL, url_to_invoke))
            if ar.status == ApiResponse.success:
                # save locally
                sto = SubscriptionToOther()
                sto.URL = UKCL
                sto.first_version_UKCL = ar.content  # it contains the UKCL of the first version
                sto.save()
                return render(request, 'knowledge_server/export.json', {'json': ar.response},
                              content_type="application/json")
            else:
                return render(request, 'knowledge_server/export.json', {'json': ar.response},
                              content_type="application/json")
    except Exception as ex:
        return HttpResponse(str(ex))
def api_dataset_types(request):
    '''
    Parameters: None
    Implementation:
    Invoking api_datasets #64 with parameter "DataSetStructure"
    so that I get all the Structures in this_ks in a shallow export.
    CAN BE CACHED
    '''
    ar = ApiResponse(request=request)
    dss = DataSetStructure.objects.using('materialized').get(
        name=DataSetStructure.dataset_structure_DSN)
    return api_datasets(request, DataSetStructure_UKCL=dss.UKCL,
                        response_format=ar.response_format)
def api_datasets(request, DataSetStructure_UKCL, response_format):
    '''
    http://redmine.davide.galletti.name/issues/64
    All the released datasets of a given structure/type.
    Parameters:
    * response_format { 'XML' | 'JSON' }
    * DataSetStructure_UKCL: UKCL of the DataSetStructure, encoded
    Implementation:
    # it fetches the structure from the DB and looks for all the datasets
    # with that structure; if it is not a view, only those that are released
    CAN BE CACHED
    '''
    ar = ApiResponse()
    response_format = response_format.upper()
    dss = DataSetStructure.retrieve_locally(
        urllib.parse.unquote(DataSetStructure_UKCL).replace("%2F", "/"))
    # Now I need to get all the released DataSet of the DataSetStructure passed as a parameter
    if dss.is_a_view:
        # version_released is not relevant for views
        released_dataset = DataSet.objects.filter(dataset_structure=dss)
    else:
        released_dataset = DataSet.objects.filter(dataset_structure=dss, version_released=True)
    serialized = ""
    dataset_list = []
    for dataset in released_dataset:
        if response_format == 'JSON':
            dataset_list.append(dataset.export(export_format="DICT",
                                               force_external_reference=True))
        else:
            serialized += dataset.export(export_format=response_format,
                                         force_external_reference=True)
    if response_format == 'XML':
        ar.status = ApiResponse.success
        ar.content = "<DataSets>" + serialized + "</DataSets>"
        return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()},
                      content_type="application/xhtml+xml")
    if response_format == 'JSON':
        ar.content = {"DataSets": dataset_list}
        ar.status = ApiResponse.success
        return render(request, 'knowledge_server/export.json', {'json': ar.json()},
                      content_type="application/json")
def api_subscribe(request):
    '''
    #35
    Parameters:
        UKCL: the one to which I want to subscribe
        remote_url: the URL this KS has to invoke to notify
    NEVER CACHED
    '''
    UKCL = request.GET['UKCL']
    remote_url = request.GET['remote_url']
    # I want to check that the request comes from the same domain as the remote_url;
    # I do this by checking that the IP address is the same
    x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
    if x_forwarded_for:
        ip = x_forwarded_for.split(',')[0]
    else:
        ip = request.META.get('REMOTE_ADDR')
    ip_notification = socket.gethostbyname(KsUrl(remote_url).netloc)
    if ip_notification != ip:
        return render(
            request, 'knowledge_server/export.json',
            {'json': ApiResponse(ApiResponse.failure,
                                 "Trying to subscribe from a different IP address",
                                 "").json()},
            content_type="application/json")
    ar = ApiResponse(request=request)
    # I try to get the remote ks info (if it is a ks) locally
    remote_ks = KnowledgeServer.get_remote_ks(remote_url)
    dataset = DataSet.objects.get(UKCL=UKCL)
    first_version_UKCL = dataset.first_version.UKCL
    # check whether the client KS has already subscribed; the check uses remote_ks (if any), remote_url otherwise
    if remote_ks:
        existing_subscriptions = SubscriptionToThis.objects.filter(
            first_version_UKCL=first_version_UKCL, remote_ks=remote_ks)
    else:
        existing_subscriptions = SubscriptionToThis.objects.filter(
            first_version_UKCL=first_version_UKCL, remote_url=remote_url)
    if len(existing_subscriptions) > 0:
        return render(
            request, 'knowledge_server/export.json',
            {'json': ApiResponse(ApiResponse.failure, "Already subscribed").json()},
            content_type="application/json")
    stt = SubscriptionToThis()
    stt.first_version_UKCL = first_version_UKCL
    stt.remote_url = remote_url
    stt.remote_ks = remote_ks
    stt.save()
    return render(
        request, 'knowledge_server/export.json',
        {'json': ApiResponse(ApiResponse.success, "Subscribed successfully",
                             first_version_UKCL).json()},
        content_type="application/json")
def api_dataset(request, DataSet_UKCL, response_format):
    '''
    #36
    It returns the data in the dataset with the UKCL in the parameter.
    Parameter:
    * DataSet_UKCL: UKCL of the DataSet
    Implementation:
    # it creates the ModelMetadata class,
    # fetches from the DB the one with pk = DataSet.root_instance_id
    # it runs to_xml of the ModelMetadata using DataSet.dataset_structure.root_node
    CAN BE CACHED
    '''
    response_format = response_format.upper()
    ar = ApiResponse()
    DataSet_UKCL_decoded = urllib.parse.unquote(DataSet_UKCL).replace("%2F", "/")
    url = KsUrl(DataSet_UKCL_decoded)
    # If it is not a DataSet we try to find the dataset it is in
    url.search_on_db()
    dataset = None
    if url.actual_instance:
        if isinstance(url.actual_instance, DataSet):
            dataset = url.actual_instance
        else:
            dataset = url.actual_instance.dataset_I_belong_to
    if (not url.actual_instance) and (not dataset):
        ar.message = "Either the URL requested is not on this database or it is not part of a released dataset."
        if response_format == 'JSON':
            return render(request, 'knowledge_server/export.json', {'json': ar.json()},
                          content_type="application/json")
        if response_format == 'XML':
            ar.status = ApiResponse.failure
            return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()},
                          content_type="application/xhtml+xml")
    actual_instance_json = ""
    # this dataset is not a view
    if not dataset.dataset_structure.is_a_view:
        actual_instance = dataset.root
    if response_format == 'JSON':
        ar.content = {"DataSet": dataset.export(export_format='DICT')}
        ar.status = ApiResponse.success
        return render(request, 'knowledge_server/export.json', {'json': ar.json()},
                      content_type="application/json")
    if response_format == 'XML':
        ar.status = ApiResponse.success
        ar.content = dataset.export(export_format=response_format)
        return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()},
                      content_type="application/xhtml+xml")
    if response_format == 'HTML' or response_format == 'BROWSE':
        actual_instance_json = '{' + actual_instance.serialize(
            dataset.dataset_structure.root_node, export_format='json',
            exported_instances=[]) + '}'
        this_ks = KnowledgeServer.this_knowledge_server()
        cont = RequestContext(request, {
            'dataset': dataset,
            'actual_instance': actual_instance,
            'actual_instance_json': actual_instance_json,
            'sn': dataset.dataset_structure.root_node,
            'DataSet_UKCL': DataSet_UKCL,
            'this_ks': this_ks,
            'this_ks_encoded_url': this_ks.url(True)
        })
        return render_to_response('knowledge_server/browse_dataset.html',
                                  context_instance=cont)
def api_dataset_info(request):
    '''
    #52
    Parameters:
    * response_format { 'XML' | 'JSON' | 'HTML' = 'BROWSE' }
    * DataSet_UKCL: UKCL of the DataSet
    Implementation:
    It fetches the DataSet, then the list of all those that share the same root;
    it returns DataSet.export(response_format) and, for each one in the above list:
        the UKCL of the DataSet
        the version status {working | released | obsolete}
        the version number (e.g. 0.1.0)
        the version date
        the version description
        other version metadata
    CAN BE CACHED
    '''
    DataSet_UKCL = request.GET['UKCL']
    ar = ApiResponse(request=request)
    DataSet_UKCL_unquoted = urllib.parse.unquote(DataSet_UKCL).replace("%2F", "/")
    dataset = DataSet.retrieve_locally(DataSet_UKCL_unquoted)
    all_versions = DataSet.objects.filter(first_version=dataset.first_version)
    all_versions_serialized = ""
    all_versions_list = []
    if ar.response_format != 'HTML' and ar.response_format != 'BROWSE':
        for v in all_versions:
            if ar.response_format == 'JSON':
                # note: I am using DICT as a response_format so that I can merge the dicts (.update) and then convert to json
                all_versions_list.append(
                    v.export(export_format='DICT', force_external_reference=True))
            else:
                all_versions_serialized += v.export(export_format=ar.response_format,
                                                    force_external_reference=True)
    if ar.response_format == 'XML':
        ar.status = ApiResponse.success
        ar.content = ("<DataSet>"
                      + dataset.export(export_format=ar.response_format,
                                       force_external_reference=True)
                      + "</DataSet><Versions>" + all_versions_serialized + "</Versions>")
        return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()},
                      content_type="application/xhtml+xml")
    if ar.response_format == 'JSON':
        ar.content = {
            "DataSet": dataset.export(export_format="DICT", force_external_reference=True),
            "Versions": all_versions_list
        }
        ar.status = ApiResponse.success
        return HttpResponse(ar.json(), content_type="application/json")
        # return render(request, 'knowledge_server/export.json', {'json': ar.json()}, content_type="application/json")
    if ar.response_format == 'HTML' or ar.response_format == 'BROWSE':
        if dataset.dataset_structure.is_a_view:
            instances = dataset.get_instances()
        else:
            instances = []
            instances.append(dataset.root)
        all_versions_with_instances = []
        for v in all_versions:
            if v.UKCL != dataset.UKCL:
                version_with_instance = {}
                version_with_instance['dataset'] = v
                version_with_instance['root'] = []
                # views have no version by themselves; only their components have one, and they can differ;
                # so if we are here we are not in a view, hence there is just one instance:
                # I get root and not .get_instances()
                version_with_instance['root'].append(v.root)
                all_versions_with_instances.append(version_with_instance)
        this_ks = KnowledgeServer.this_knowledge_server()
        cont = RequestContext(request, {
            'DataSet_UKCL': DataSet_UKCL,
            'dataset': dataset,
            'all_versions_with_instances': all_versions_with_instances,
            'ks': dataset.knowledge_server,
            'instances': instances,
            'this_ks': this_ks,
            'this_ks_encoded_url': this_ks.url(True)
        })
        return render_to_response('knowledge_server/api_dataset_info.html',
                                  context_instance=cont)
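# A minimal sketch of a JSON consumer for api_dataset_info, assuming the same
# query-string style as the other endpoints (UKCL plus format=JSON). The host
# and path are placeholders; the 'content' envelope with "DataSet" and
# "Versions" keys mirrors what the view above puts into ar.content.
def example_list_dataset_versions(oks_home, dataset_ukcl):
    import json
    from urllib.parse import quote
    from urllib.request import urlopen

    # hypothetical endpoint path; the real mapping lives in the project's urls.py
    url = (oks_home + "/oks/api/dataset_info/?UKCL="
           + quote(dataset_ukcl, safe="") + "&format=JSON")
    envelope = json.loads(urlopen(url).read().decode("utf-8"))
    content = envelope["content"]
    print("Requested dataset:", content["DataSet"].get("UKCL"))
    for version in content["Versions"]:
        # each entry is a shallow export of one version sharing the same first_version
        print("Version:", version.get("UKCL"))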
def api_dataset(request, UKCL=None):
    '''
    #36
    It returns the data in the dataset with the UKCL in the parameter.
    Parameter:
    * DataSet_UKCL: UKCL of the DataSet
    Implementation:
    # it creates the ModelMetadata class,
    # fetches from the DB the one with pk = DataSet.root_instance_id
    # it runs to_xml of the ModelMetadata using DataSet.dataset_structure.root_node
    CAN BE CACHED
    '''
    if UKCL:
        # invoked from ks_info
        DataSet_UKCL = UKCL
    else:
        DataSet_UKCL = request.GET['UKCL']
    ar = ApiResponse(request=request)
    url = KsUrl(DataSet_UKCL)
    # If it is not a DataSet we try to find the dataset it is in
    url.search_on_db()
    dataset = None
    if url.actual_instance:
        if isinstance(url.actual_instance, DataSet):
            dataset = url.actual_instance
        else:
            dataset = url.actual_instance.dataset_I_belong_to
    if (not url.actual_instance) and (not dataset):
        ar.message = "Either the URL requested is not on this database or it is not part of a released dataset."
        if ar.response_format == 'JSON':
            return render(request, 'knowledge_server/export.json', {'json': ar.json()},
                          content_type="application/json")
        if ar.response_format == 'XML':
            ar.status = ApiResponse.failure
            return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()},
                          content_type="application/xhtml+xml")
    actual_instance_json = ""
    # this dataset is not a view
    if not dataset.dataset_structure.is_a_view:
        actual_instance = dataset.root
    if ar.response_format == 'JSON':
        ar.content = {"DataSet": dataset.export(export_format='DICT')}
        ar.status = ApiResponse.success
        return render(request, 'knowledge_server/export.json', {'json': ar.json()},
                      content_type="application/json")
    if ar.response_format == 'XML':
        ar.status = ApiResponse.success
        ar.content = dataset.export(export_format=ar.response_format)
        return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()},
                      content_type="application/xhtml+xml")
    if ar.response_format == 'HTML' or ar.response_format == 'BROWSE':
        actual_instance_json = '{' + actual_instance.serialize(
            dataset.dataset_structure.root_node, export_format='json',
            exported_instances=[]) + '}'
        this_ks = KnowledgeServer.this_knowledge_server()
        cont = RequestContext(request, {
            'dataset': dataset,
            'actual_instance': actual_instance,
            'actual_instance_json': actual_instance_json,
            'sn': dataset.dataset_structure.root_node,
            'DataSet_UKCL': DataSet_UKCL,
            'this_ks': this_ks,
            'this_ks_encoded_url': this_ks.url(True)
        })
        return render_to_response('knowledge_server/browse_dataset.html',
                                  context_instance=cont)
def datasets_of_type(request, ks_url, UKCL, response_format):
    '''
    Returns the list of datasets of a specific type/structure.
    '''
    this_ks = KnowledgeServer.this_knowledge_server()
    response_format = response_format.upper()
    ks_url = urllib.parse.unquote(ks_url)
    tmp_ks_url = KsUrl(ks_url)
    q_UKCL = UKCL
    UKCL = urllib.parse.unquote(UKCL)
    if this_ks.scheme != tmp_ks_url.scheme or this_ks.netloc != tmp_ks_url.netloc:
        # info on the remote ks
        ar_ks_info = ApiResponse()
        ar_ks_info.invoke_oks_api(ks_url, 'api_ks_info') + "?format=JSON"
        organization = ar_ks_info.content['DataSet']['ActualInstance']['Organization']
        for ks in organization['knowledgeserver_set']:
            if ks['this_ks']:
                external_ks_json = ks
        external_ks = KnowledgeServer()
        external_ks.name = external_ks_json['name']
        external_ks.scheme = external_ks_json['scheme']
        external_ks.netloc = external_ks_json['netloc']
        external_ks.description = external_ks_json['description']
        browsing_this = False
    else:
        external_ks = this_ks
        organization = this_ks.organization
        browsing_this = True
    # info on the DataSetStructure
    # TODO: the following call relies on api_catch_all; use dataset_info instead
    response = urlopen(UKCL + "/json")
    es_info_json = json_loads(response.read().decode("utf-8"))
    if response_format == 'XML':
        local_url = reverse('api_datasets') + ("?UKCL=%s&format=%s" % (q_UKCL, response_format))
    if response_format == 'JSON' or response_format == 'BROWSE':
        local_url = reverse('api_datasets') + ("?UKCL=%s&format=JSON" % q_UKCL)
    response = urlopen(ks_url + local_url)
    datasets = response.read().decode("utf-8")
    if response_format == 'XML':
        return render(request, 'knowledge_server/export.xml', {'xml': datasets},
                      content_type="application/xhtml+xml")
    if response_format == 'JSON':
        return render(request, 'knowledge_server/export.json', {'json': datasets},
                      content_type="application/json")
    if response_format == 'BROWSE':
        # parse
        decoded = json_loads(datasets)
        # I prepare a list of the roots' UKCLs so that I can check which ones I have subscribed to
        first_version_UKCLs = []
        for ei in decoded['content']['DataSets']:
            if 'first_version' in ei:
                first_version_UKCLs.append(ei['first_version']['UKCL'])
            else:
                first_version_UKCLs.append(ei['UKCL'])
        subscribed = SubscriptionToOther.objects.filter(
            first_version_UKCL__in=first_version_UKCLs)
        subscribed_first_version_UKCLs = []
        for s in subscribed:
            subscribed_first_version_UKCLs.append(s.first_version_UKCL)
        datasets = []
        for ei in decoded['content']['DataSets']:
            dataset = {}
            if 'ActualInstance' in ei.keys():
                actual_instance_class = list(ei['ActualInstance'].keys())[0]
                dataset['actual_instance_name'] = ei['ActualInstance'][actual_instance_class]['name']
            else:
                # it is a view
                dataset['actual_instance_name'] = ei['description']
            dataset['encodedUKCL'] = urllib.parse.urlencode({'': ei['UKCL']})[1:]
            dataset['UKCL'] = urllib.parse.quote(ei['UKCL']).replace("/", "%2F")
            subscribed_UKCL = ei['first_version']['UKCL'] if 'first_version' in ei else ei['UKCL']
            dataset['subscribed'] = subscribed_UKCL in subscribed_first_version_UKCLs
            datasets.append(dataset)
        cont = RequestContext(request, {
            'browsing_this': browsing_this,
            'datasets': datasets,
            'organization': organization,
            'this_ks': this_ks,
            'this_ks_encoded_url': this_ks.url(True),
            'external_ks': external_ks,
            'es_info_json': es_info_json
        })
        return render_to_response('knowledge_server/datasets_of_type.html',
                                  context_instance=cont)
def debug(request):
    '''
    created to debug code

    Args:
        request:
    '''
    UKCL = request.GET["UKCL"]
    return HttpResponse("OK: " + UKCL)

    # everything below is unreachable: old debugging experiments kept for reference
    try:
        from django.core import management
        # management.call_command('migrate', "--database=materialized", interactive=False)
        # management.call_command('migrate', 'ap', interactive=False)
        # management.call_command('migrate', interactive=False)
        management.call_command('migrate', 'ap', interactive=False)  # does it work?
        management.call_command('migrate',
                                "knowledge_server 0003_initial_data --database=materialized",
                                interactive=False)
        return HttpResponse("OK")

        import scrapy

        class DmozItem(scrapy.Item):
            title = scrapy.Field()
            link = scrapy.Field()
            desc = scrapy.Field()

        class DmozSpider(scrapy.Spider):
            name = "dmoz"
            allowed_domains = ["dmoz.org"]
            start_urls = [
                "http://www.dmoz.org/Computers/Programming/Languages/Python/",
            ]

            def parse(self, response):
                for href in response.css("ul.directory.dir-col > li > a::attr('href')"):
                    url = response.urljoin(href.extract())
                    yield scrapy.Request(url, callback=self.parse_dir_contents)

            def parse_dir_contents(self, response):
                for sel in response.xpath('//ul/li'):
                    item = DmozItem()
                    item['title'] = sel.xpath('a/text()').extract()
                    item['link'] = sel.xpath('a/@href').extract()
                    item['desc'] = sel.xpath('text()').extract()
                    yield item

        return HttpResponse('OK')

        ar = ApiResponse()
        ar.content = {"DataSet": "Versions"}
        ar.status = ApiResponse.success
        return HttpResponse(ar.json(), content_type="application/json")

        # TODO: UPDATE ON STACKOVERFLOW: http://stackoverflow.com/questions/8784400/clearing-specific-cache-in-django
        from licenses.models import License
        db_alias = 'default'
        ccbysa40 = License.objects.using(db_alias).get(short_name="CC-BY-SA-4.0")
        dssModelMetadataFields = DataSetStructure.get_from_name(
            DataSetStructure.model_metadata_DSN, db_alias)
        dssDataSetStructureStructureNode = DataSetStructure.get_from_name(
            DataSetStructure.dataset_structure_DSN, db_alias)
        dssOrganizationKS = DataSetStructure.get_from_name(
            DataSetStructure.organization_DSN, db_alias)
        for ds in DataSet.objects.using(db_alias).filter(dataset_structure=dssModelMetadataFields):
            ds.licenses.add(ccbysa40)
            ds.save()
        for ds in DataSet.objects.using(db_alias).filter(dataset_structure=dssDataSetStructureStructureNode):
            ds.licenses.add(ccbysa40)
            ds.save()
        for ds in DataSet.objects.using(db_alias).filter(dataset_structure=dssOrganizationKS):
            ds.licenses.add(ccbysa40)
            ds.save()

        db_alias = 'materialized'
        ccbysa40 = License.objects.using(db_alias).get(short_name="CC-BY-SA-4.0")
        dssModelMetadataFields = DataSetStructure.get_from_name(
            DataSetStructure.model_metadata_DSN, db_alias)
        dssDataSetStructureStructureNode = DataSetStructure.get_from_name(
            DataSetStructure.dataset_structure_DSN, db_alias)
        dssOrganizationKS = DataSetStructure.get_from_name(
            DataSetStructure.organization_DSN, db_alias)
        for ds in DataSet.objects.using(db_alias).filter(dataset_structure=dssModelMetadataFields):
            ds.licenses.add(ccbysa40)
            ds.save()
        for ds in DataSet.objects.using(db_alias).filter(dataset_structure=dssDataSetStructureStructureNode):
            ds.licenses.add(ccbysa40)
            ds.save()
        for ds in DataSet.objects.using(db_alias).filter(dataset_structure=dssOrganizationKS):
            ds.licenses.add(ccbysa40)
            ds.save()
        return HttpResponse("OK ")

        from django.core.cache import cache
        from django.utils.cache import get_cache_key, _generate_cache_header_key
        from django.utils.encoding import escape_uri_path
        from django.http import HttpRequest

        new_request = HttpRequest()
        new_request.path = 'root.beta.thekoa.org/oks/api/ks_info/JSON/'
        # this path works
        new_request.META['SERVER_PORT'] = request.META['SERVER_PORT']
        new_request.META['SERVER_NAME'] = request.META['SERVER_NAME']
        key = _generate_cache_header_key("", new_request)
        if cache.has_key(key):
            cache.delete(key)

        full_path = 'http://root.beta.thekoa.org/oks/api/datasets/http%253A%252F%252Froot.beta.thekoa.org%252Fknowledge_server%252FDataSetStructure%252F4/JSON/'
        import hashlib
        from django.utils.encoding import force_bytes, iri_to_uri
        from django.utils.cache import _i18n_cache_key_suffix
        # code from _generate_cache_header_key
        url = hashlib.md5(force_bytes(iri_to_uri(full_path)))
        cache_key = 'views.decorators.cache.cache_header.%s.%s' % ("", url.hexdigest())
        key = _i18n_cache_key_suffix(request, cache_key)
        if cache.has_key(key):
            cache.delete(key)
        return HttpResponse("OK ")

        # d = DataSet.objects.get(pk=1)
        # s = d.shallow_structure()
        # rct = d.root_content_type
        #
        # for structure_child_node in s.root_node.child_nodes.all():
        #     mm = structure_child_node.sn_model_metadata(d)
        #     print(mm.name)

        dssContinentState = DataSetStructure()
        dssContinentState.name = "Test Continent-SubContinent-State"
        dssContinentState.SetNotNullFields()
        dssContinentState.save()
        return HttpResponse("OK ")
    except Exception as ex:
        logger.error("views.debug: " + str(ex))
        return HttpResponse(str(ex))
def api_dataset_info(request, DataSet_UKCL, response_format):
    '''
    #52
    Parameters:
    * response_format { 'XML' | 'JSON' | 'HTML' = 'BROWSE' }
    * DataSet_UKCL: UKCL of the DataSet
    Implementation:
    It fetches the DataSet, then the list of all those that share the same root;
    it returns DataSet.export(response_format) and, for each one in the above list:
        the UKCL of the DataSet
        the version status {working | released | obsolete}
        the version number (e.g. 0.1.0)
        the version date
        the version description
        other version metadata
    CAN BE CACHED
    '''
    response_format = response_format.upper()
    DataSet_UKCL_unquoted = urllib.parse.unquote(DataSet_UKCL).replace("%2F", "/")
    dataset = DataSet.retrieve_locally(DataSet_UKCL_unquoted)
    all_versions = DataSet.objects.filter(first_version=dataset.first_version)
    all_versions_serialized = ""
    all_versions_list = []
    if response_format != 'HTML' and response_format != 'BROWSE':
        for v in all_versions:
            if response_format == 'JSON':
                # note: I am using DICT as a response_format so that I can merge the dicts (.update) and then convert to json
                all_versions_list.append(v.export(export_format='DICT',
                                                  force_external_reference=True))
            else:
                all_versions_serialized += v.export(export_format=response_format,
                                                    force_external_reference=True)
    if response_format == 'XML':
        ar = ApiResponse()
        ar.status = ApiResponse.success
        ar.content = ("<DataSet>"
                      + dataset.export(export_format=response_format,
                                       force_external_reference=True)
                      + "</DataSet><Versions>" + all_versions_serialized + "</Versions>")
        return render(request, 'knowledge_server/export.xml', {'xml': ar.xml()},
                      content_type="application/xhtml+xml")
    if response_format == 'JSON':
        ar = ApiResponse()
        ar.content = {
            "DataSet": dataset.export(export_format="DICT", force_external_reference=True),
            "Versions": all_versions_list
        }
        ar.status = ApiResponse.success
        return HttpResponse(ar.json(), content_type="application/json")
        # return render(request, 'knowledge_server/export.json', {'json': ar.json()}, content_type="application/json")
    if response_format == 'HTML' or response_format == 'BROWSE':
        if dataset.dataset_structure.is_a_view:
            instances = dataset.get_instances()
        else:
            instances = []
            instances.append(dataset.root)
        all_versions_with_instances = []
        for v in all_versions:
            if v.UKCL != dataset.UKCL:
                version_with_instance = {}
                version_with_instance['dataset'] = v
                version_with_instance['root'] = []
                # views have no version by themselves; only their components have one, and they can differ;
                # so if we are here we are not in a view, hence there is just one instance:
                # I get root and not .get_instances()
                version_with_instance['root'].append(v.root)
                all_versions_with_instances.append(version_with_instance)
        this_ks = KnowledgeServer.this_knowledge_server()
        cont = RequestContext(request, {
            'DataSet_UKCL': DataSet_UKCL,
            'dataset': dataset,
            'all_versions_with_instances': all_versions_with_instances,
            'ks': dataset.knowledge_server,
            'instances': instances,
            'this_ks': this_ks,
            'this_ks_encoded_url': this_ks.url(True)
        })
        return render_to_response('knowledge_server/api_dataset_info.html',
                                  context_instance=cont)