def request_ark(request, dataset_uuid):
    """Mint an ARK identifier for a dataset via EZID and attach it.

    GET only; a non-GET request falls through and returns None (pre-existing
    behavior, preserved). Redirects to the private project list when the
    dataset already has an ARK, when minting fails, or on any exception.
    """
    if request.method == 'GET':
        context = {}

        # Anonymous users get the shared portal client; logged-in users
        # use their own OAuth-backed Agave client.
        if request.user.is_anonymous():
            api_client = get_portal_api_client()
        else:
            api_client = request.user.agave_oauth.api_client

        try:
            dataset = Dataset(api_client=api_client, uuid=dataset_uuid)
            associated_ids = dataset.identifiers

            # Guard: one ARK per dataset.
            if _has_identifier(associated_ids, 'ark'):
                messages.warning(request, "Dataset already has an ARK identifier.")
                return HttpResponseRedirect(reverse('ids_projects:project-list-private'))

            metadata = meta_for_ark(dataset)

            # TODO(review): EZID test credentials and test shoulder are
            # hard-coded; move to settings before production use.
            client = ezidClient('apitest', 'apitest')
            response = client.Mint('ark:/99999/fk4', metadata)

            if "success" in response:
                ark = response['success']
                # Persist the new identifier and link it to the dataset.
                identifier = Identifier(api_client=api_client,
                                        type='ark',
                                        uid=ark,
                                        dataset=dataset)
                identifier.save()
                dataset = _add_identifier_to_dataset(dataset, identifier)
            else:
                logger.error("Failed to mint an ARK identifier!")
                messages.warning(request, "Error in requesting ARK!")
                return HttpResponseRedirect(reverse('ids_projects:project-list-private'))

            # NOTE(review): renders the DOI template — presumably deliberate
            # template reuse; confirm no request_ark.html exists.
            return render(request, 'ids_projects/datasets/request_doi.html', context)
        except Exception as e:
            # View-boundary catch-all: log, surface to the user, and redirect.
            exception_msg = 'Unable to load process. %s' % e
            logger.error(exception_msg)
            messages.warning(request, exception_msg)
            return HttpResponseRedirect(reverse('ids_projects:project-list-private'))
def request_doi(request, dataset_uuid):
    """Mint a DOI for a dataset via EZID and attach it.

    GET only; a non-GET request falls through and returns None (pre-existing
    behavior, preserved). On success, EZID also generates an ARK, which is
    written back into the DataCite record as an alternate identifier.
    Redirects to the private project list on any failure.
    """
    if request.method == 'GET':
        context = {}

        # Anonymous users get the shared portal client; logged-in users
        # use their own OAuth-backed Agave client.
        if request.user.is_anonymous():
            api_client = get_portal_api_client()
        else:
            api_client = request.user.agave_oauth.api_client

        try:
            dataset = Dataset(api_client=api_client, uuid=dataset_uuid)
            associated_ids = dataset.identifiers

            # Guard: one DOI per dataset.
            if _has_identifier(associated_ids, 'doi'):
                messages.warning(request, "Dataset already has a DOI identifier.")
                return HttpResponseRedirect(reverse('ids_projects:project-list-private'))

            # Build the DataCite XML payload from the dataset metadata.
            essential = meta_for_doi(dataset)
            builder = identifierBuilder()
            builder.buildXML(essential)
            xmlObject = builder.getXML()

            # Request the DOI from EZID.
            # TODO(review): EZID test credentials and test shoulder are
            # hard-coded; move to settings before production use.
            client = ezidClient('apitest', 'apitest')
            metadata = {
                "datacite": ET.tostring(xmlObject, encoding="UTF-8", method="xml"),
            }
            response = client.Mint('doi:10.5072/FK2', metadata)

            if "success" in response:
                # EZID success payload looks like "doi:... | ark:...".
                res = response['success']
                doi = res.split('|')[0].strip()
                ark = res.split('|')[1].strip()

                # Record the auto-generated ARK as an alternate identifier
                # and push the updated DataCite record back to EZID.
                essential_new = update_alternateIdentifier(essential, ark)
                builder.setAlternateIdentifiers(essential_new)
                xmlObject = builder.getXML()
                metadata["datacite"] = ET.tostring(xmlObject, encoding="UTF-8", method="xml")
                response = client.Update(doi, metadata)

                # Persist the identifier and link it to the dataset.
                identifier = Identifier(api_client=api_client,
                                        type='doi',
                                        uid=doi,
                                        dataset=dataset)
                identifier.save()
                dataset = _add_identifier_to_dataset(dataset, identifier)
                # NOTE: Agave metadata updates may not be visible immediately
                # (network delay), but they do land.
            else:
                logger.error("Failed to mint a DOI identifier!")
                messages.warning(request, "Error in requesting DOI!")
                return HttpResponseRedirect(reverse('ids_projects:project-list-private'))

            return render(request, 'ids_projects/datasets/request_doi.html', context)
        except Exception as e:
            # View-boundary catch-all: log, surface to the user, and redirect.
            exception_msg = 'Unable to load process. %s' % e
            logger.error(exception_msg)
            messages.warning(request, exception_msg)
            return HttpResponseRedirect(reverse('ids_projects:project-list-private'))
def create(request, dataset_uuid):
    """Mint a DOI for an existing dataset.

    NOTE(review): despite the name and the stale "Create a new dataset"
    docstring this replaces, this view does not create a dataset — it
    renders a DOI-metadata form (GET) and mints a DOI via EZID (POST).

    GET: render the identifier form pre-filled from the dataset metadata.
    POST: mint a DOI, write the generated ARK back as an alternate
    identifier, persist the Identifier, and redirect to the dataset view.
    A request with any other method falls through and returns None
    (pre-existing behavior, preserved).
    """
    # Field descriptors consumed by IdentifierForm.
    identifier_fields = [
        {'form_field': True, 'id': 'Date', 'label': 'Date', 'required': False},
        {'form_field': True, 'id': 'Format', 'label': 'Format', 'required': False},
        {'form_field': True, 'id': 'Version', 'label': 'Version', 'required': False},
        {'form_field': True, 'id': 'Rights', 'label': 'Rights', 'required': False},
        {'form_field': True, 'id': 'creatorName', 'label': 'Creator Name', 'required': False},
        {'form_field': True, 'id': 'description', 'label': 'Description', 'required': False},
        {'form_field': True, 'id': 'subject', 'label': 'Subject', 'required': False},
        {'form_field': True, 'id': 'title', 'label': 'Title', 'required': False},
    ]

    api_client = request.user.agave_oauth.api_client
    dataset = Dataset(api_client=api_client, uuid=dataset_uuid)
    essential = meta_for_doi(dataset)

    # Seed the form with the first creator/description/subject/title
    # from the DataCite metadata.
    initial_data = {
        'creatorName': essential['creators'][0]['creatorName']['text'],
        'description': essential['descriptions'][0]['text'],
        'subject': essential['subjects'][0]['text'],
        'title': essential['titles'][0]['text'],
    }

    context = dict()

    #######
    # GET #
    #######
    if request.method == 'GET':
        context['form_identifier_create'] = IdentifierForm(
            fields=identifier_fields, initial=initial_data)
        return render(request, 'ids_projects/identifier/create.html', context)

    ########
    # POST #
    ########
    if request.method == 'POST':
        # Build the DataCite XML payload.
        builder = identifierBuilder()
        builder.buildXML(essential)
        xmlObject = builder.getXML()

        # Request the DOI from EZID.
        # TODO(review): EZID test credentials and test shoulder are
        # hard-coded; move to settings before production use.
        client = ezidClient('apitest', 'apitest')
        metadata = {
            "datacite": ET.tostring(xmlObject, encoding="UTF-8", method="xml"),
        }
        response = client.Mint('doi:10.5072/FK2', metadata)

        if "success" in response:
            # EZID success payload looks like "doi:... | ark:...".
            res = response['success']
            doi = res.split('|')[0].strip()
            ark = res.split('|')[1].strip()

            # Record the auto-generated ARK as an alternate identifier
            # and push the updated DataCite record back to EZID.
            essential_new = update_alternateIdentifier(essential, ark)
            builder.setAlternateIdentifiers(essential_new)
            xmlObject = builder.getXML()
            metadata["datacite"] = ET.tostring(xmlObject, encoding="UTF-8", method="xml")
            response = client.Update(doi, metadata)

            # Persist the identifier and link it to the dataset.
            identifier = Identifier(api_client=api_client,
                                    type='doi',
                                    uid=doi,
                                    dataset=dataset)
            identifier.save()
            dataset = _add_identifier_to_dataset(dataset, identifier)
            # NOTE: Agave metadata updates may not be visible immediately
            # (network delay), but they do land.
        else:
            logger.error("Failed to mint a DOI identifier!")
            messages.warning(request, "Error in requesting DOI!")
            return HttpResponseRedirect(reverse('ids_projects:project-list-private'))

        return HttpResponseRedirect(reverse('ids_projects:dataset-view',
                                            kwargs={'dataset_uuid': dataset.uuid}))