def after_create(self, context, pkg_dict):
    '''Create a DOI for a newly created dataset.

    Called after creation of a dataset but before any resources have
    been added, so the dataset state is still 'draft'.

    :param context: CKAN action context
    :param pkg_dict: dict of the newly created package
    '''
    package_id = pkg_dict[u'id']
    get_or_create_doi(package_id)
def test_create_identifier(self):
    '''Check that a DOI record is created alongside the package.

    Nothing is pushed to the DataCite service at this point, so the
    identifier must not be marked as published.
    '''
    package_id = self.package_dict[u'id']
    identifier = doi_lib.get_or_create_doi(package_id)
    nose.tools.assert_is_instance(identifier, doi_lib.DOI)
    nose.tools.assert_equal(identifier.package_id, package_id)
    nose.tools.assert_is_none(identifier.published)
def test_create_metadata(self):
    '''Build and validate the DataCite metadata for a package's DOI.'''
    doi = doi_lib.get_or_create_doi(self.package_dict[u'id'])
    # Assemble the metadata dict that would be sent to DataCite
    metadata_dict = doi_lib.build_metadata(self.package_dict, doi)
    # Basic sanity checks: title and author are mandatory in the
    # DataCite schema, so validation only fails here if another
    # plugin has stripped a mandatory field from the package
    doi_lib.validate_metadata(metadata_dict)
def after_update(self, context, pkg_dict):
    '''Dataset has been created / updated.

    Check the status of the dataset to determine if we should publish
    the DOI to the DataCite network.

    :param context: CKAN action context
    :param pkg_dict: dict of the updated package
    :returns: pkg_dict, unchanged
    '''
    # Is this active and public? If so we need to make sure we have an
    # active DOI
    if pkg_dict.get(u'state', u'active') == u'active' and not pkg_dict.get(
            u'private', False):
        package_id = pkg_dict[u'id']
        # remove user-defined update schemas first (if needed)
        context.pop(u'schema', None)
        # Load the original package, so we can determine if user has
        # changed any fields
        orig_pkg_dict = toolkit.get_action(u'package_show')(
            context, {
                u'id': package_id
            })
        # If metadata_created isn't populated in pkg_dict, copy from the
        # original
        if u'metadata_created' not in pkg_dict:
            pkg_dict[u'metadata_created'] = orig_pkg_dict.get(
                u'metadata_created', u'')
        # Load the local DOI
        doi = get_doi(package_id)
        # If we don't have a DOI, create one. This could happen if the
        # DOI module is enabled after a dataset has been created
        if not doi:
            doi = get_or_create_doi(package_id)
        # Build the metadata dict to pass to DataCite service
        metadata_dict = build_metadata(pkg_dict, doi)
        # Perform some basic checks against the data - we require at the
        # very least title and author fields - they're mandatory in the
        # DataCite Schema. This will only be an issue if another plugin
        # has removed a mandatory field
        validate_metadata(metadata_dict)
        # Is this an existing DOI? Update it
        if doi.published:
            # Before updating, check if any of the metadata has been
            # changed - otherwise we end up sending loads of revisions
            # to DataCite for minor edits. Load the current version
            orig_metadata_dict = build_metadata(orig_pkg_dict, doi)
            # FIX: the original used cmp(), which was removed in
            # Python 3; direct dict inequality is equivalent and works
            # on both Python 2 and 3
            if orig_metadata_dict != metadata_dict:
                # Not the same, so we want to update the metadata
                update_doi(package_id, **metadata_dict)
                toolkit.h.flash_success(u'DataCite DOI metadata updated')
            # TODO: If editing a dataset older than 5 days, create DOI
            # revision
        # New DOI - publish to datacite
        else:
            toolkit.h.flash_success(u'DataCite DOI created')
            publish_doi(package_id, **metadata_dict)
    return pkg_dict
def test_publish_to_datacite(self, mock_request):
    '''Publish a DOI's metadata to DataCite (HTTP layer mocked).'''
    package_id = self.package_dict[u'id']
    doi = doi_lib.get_or_create_doi(package_id)
    # Metadata must validate before it can be pushed to the service
    metadata_dict = doi_lib.build_metadata(self.package_dict, doi)
    doi_lib.validate_metadata(metadata_dict)
    doi_lib.publish_doi(package_id, **metadata_dict)