Code Example #1
File: plugin.py  Project: ERC-data/ckanext-doi
    def after_update(self, context, pkg_dict):
        """
        Dataset has been created / updated
        Check status of the dataset to determine if we should publish DOI to datacite network

        @param pkg_dict:
        @return: pkg_dict
        """

        # Is this active and public? If so we need to make sure we have an active DOI
        if pkg_dict.get('state', 'active') == 'active' and not pkg_dict.get('private', False):

            package_id = pkg_dict['id']

            # Load the original package, so we can determine if user has changed any fields
            orig_pkg_dict = get_action('package_show')(context, {'id': package_id})

            # Metadata created isn't populated in pkg_dict - so copy from the original
            pkg_dict['metadata_created'] = orig_pkg_dict['metadata_created']

            # Load the local DOI
            doi = get_doi(package_id)

            # If we don't have a DOI, create one
            # This could happen if the DOI module is enabled after a dataset has been created

            if not doi:
                doi = create_unique_identifier(package_id)

            # Build the metadata dict to pass to DataCite service
            metadata_dict = build_metadata(pkg_dict, doi)

            # Perform some basic checks against the data - we require at the very least
            # title and author fields - they're mandatory in the DataCite Schema
            # This will only be an issue if another plugin has removed a mandatory field
            validate_metadata(metadata_dict)

            # Is this an existing DOI? Update it
            if doi.published:

                # Before updating, check whether any of the metadata has changed; otherwise
                # we end up sending loads of revisions to DataCite for minor edits
                # Load the current version
                orig_metadata_dict = build_metadata(orig_pkg_dict, doi)
                # Check if the two dictionaries are the same
                if cmp(orig_metadata_dict, metadata_dict) != 0:
                    # Not the same, so we want to update the metadata
                    update_doi(package_id, **metadata_dict)
                    h.flash_success('DataCite DOI metadata updated')

                # TODO: If editing a dataset older than 5 days, create DOI revision

            # New DOI - publish to datacite
            else:
                h.flash_success('DataCite DOI created')
                publish_doi(package_id, **metadata_dict)

        return pkg_dict
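
The hook above assumes a number of helpers are already in scope. Below is a hedged sketch of the imports plugin.py would need for the calls shown; the ckanext.doi.lib path is taken from examples #5/#6 further down, and the exact helper locations may differ between forks.

# Assumed imports for the after_update hook above; module paths are a best
# guess based on the calls in the example, not a verbatim copy of plugin.py.
import ckan.lib.helpers as h
from ckan.plugins.toolkit import get_action
from ckanext.doi.lib import (get_doi, create_unique_identifier,
                             build_metadata, validate_metadata,
                             publish_doi, update_doi)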
Code Example #2
File: test_doi.py  Project: qlands/ckanext-doi
    def test_create_metadata(self):
        doi = doi_lib.get_or_create_doi(self.package_dict[u'id'])

        # Build the metadata dict to pass to DataCite service
        metadata_dict = doi_lib.build_metadata(self.package_dict, doi)

        # Perform some basic checks against the data - we require at the very least
        # title and author fields - they're mandatory in the DataCite Schema
        # This will only be an issue if another plugin has removed a mandatory field
        doi_lib.validate_metadata(metadata_dict)
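
Example #2 calls get_or_create_doi, while examples #5 and #6 below spell the same get-or-create pattern out by hand. A minimal sketch of that shared pattern, assuming get_or_create_doi simply wraps the two calls:

# Presumed equivalent of doi_lib.get_or_create_doi(package_id); the real
# helper may do more, e.g. validation or logging.
doi = doi_lib.get_doi(package_id)
if not doi:
    doi = doi_lib.create_unique_identifier(package_id)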
Code Example #3
File: test_doi.py  Project: okfn/ckanext-doi
    def test_doi_metadata(self):
        '''
        Test the creation and validation of metadata
        '''
        pkg = factories.Dataset(author='Ben', auto_doi_identifier=True,
                                doi_identifier=None, doi_prefix='10.5072/FK2')

        doi = doi_lib.get_doi(pkg['id'])

        # Build the metadata dict to pass to DataCite service
        metadata_dict = doi_lib.build_metadata(pkg, doi)

        # Perform some basic checks against the data - we require at the very
        # least title and author fields - they're mandatory in the DataCite
        # Schema. This will only be an issue if another plugin has removed a
        # mandatory field
        doi_lib.validate_metadata(metadata_dict)
Code Example #4
File: test_doi.py  Project: okfn/ckanext-doi
    def test_doi_metadata_missing_author(self):
        '''Validating a DOI created from a package with no author will raise
        an exception.'''

        pkg = factories.Dataset(auto_doi_identifier=True, author='My Author',
                                doi_identifier=None, doi_prefix='10.5072/FK2')

        doi = doi_lib.get_doi(pkg['id'])

        # remove author value from pkg_dict before attempting validation
        pkg['author'] = None

        # Build the metadata dict to pass to DataCite service
        metadata_dict = doi_lib.build_metadata(pkg, doi)

        # No author in pkg_dict, so exception should be raised
        assert_raises(DOIMetadataException, doi_lib.validate_metadata,
                      metadata_dict)
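
Examples #3 and #4 show that validate_metadata raises DOIMetadataException when a mandatory DataCite field such as the author is missing. The following is an illustrative sketch of that kind of check, not the actual ckanext-doi implementation; the field names are assumptions based on the comments above.

# Hypothetical validator sketch; field names and messages are illustrative only.
def validate_metadata_sketch(metadata_dict):
    for field in ('title', 'author'):
        if not metadata_dict.get(field):
            raise DOIMetadataException(
                'Missing mandatory DataCite field: %s' % field)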
Code Example #5
    def test_doi_publish_datacite(self):

        import ckanext.doi.lib as doi_lib

        doi = doi_lib.get_doi(self.package_dict['id'])

        if not doi:
            doi = doi_lib.create_unique_identifier(self.package_dict['id'])

        # Build the metadata dict to pass to DataCite service
        metadata_dict = doi_lib.build_metadata(self.package_dict, doi)

        # Perform some basic checks against the data - we require at the very least
        # title and author fields - they're mandatory in the DataCite Schema
        # This will only be an issue if another plugin has removed a mandatory field
        doi_lib.validate_metadata(metadata_dict)

        doi_lib.publish_doi(self.package_dict['id'], **metadata_dict)
Code Example #6
File: test_doi.py  Project: ERC-data/ckanext-doi
    def test_doi_publish_datacite(self):

        import ckanext.doi.lib as doi_lib

        doi = doi_lib.get_doi(self.package_dict['id'])

        if not doi:
            doi = doi_lib.create_unique_identifier(self.package_dict['id'])

        # Build the metadata dict to pass to DataCite service
        metadata_dict = doi_lib.build_metadata(self.package_dict, doi)

        # Perform some basic checks against the data - we require at the very least
        # title and author fields - they're mandatory in the DataCite Schema
        # This will only be an issue if another plugin has removed a mandatory field
        doi_lib.validate_metadata(metadata_dict)

        doi_lib.publish_doi(self.package_dict['id'], **metadata_dict)
Code Example #7
    def after_update(self, context, pkg_dict):
        '''Dataset has been created / updated.
        Check the status of the dataset to determine whether we should publish the
        DOI to the DataCite network.

        :param context:
        :param pkg_dict:
        :returns: pkg_dict

        '''
        # Is this active and public? If so we need to make sure we have an active DOI
        if pkg_dict.get(u'state', u'active') == u'active' and not pkg_dict.get(
                u'private', False):

            package_id = pkg_dict[u'id']

            # remove user-defined update schemas first (if needed)
            context.pop(u'schema', None)
            # Load the original package, so we can determine if user has changed any fields
            orig_pkg_dict = toolkit.get_action(u'package_show')(
                context, {
                    u'id': package_id
                })

            # If metadata_created isn't populated in pkg_dict, copy from the original
            if u'metadata_created' not in pkg_dict:
                pkg_dict[u'metadata_created'] = orig_pkg_dict.get(
                    u'metadata_created', u'')

            # Load the local DOI
            doi = get_doi(package_id)

            # If we don't have a DOI, create one
            # This could happen if the DOI module is enabled after a dataset has been
            # created
            if not doi:
                doi = get_or_create_doi(package_id)

            # Build the metadata dict to pass to DataCite service
            metadata_dict = build_metadata(pkg_dict, doi)

            # Perform some basic checks against the data - we require at the very least
            # title and author fields - they're mandatory in the DataCite Schema
            # This will only be an issue if another plugin has removed a mandatory field
            validate_metadata(metadata_dict)

            # Is this an existing DOI? Update it
            if doi.published:

                # Before updating, check whether any of the metadata has changed;
                # otherwise we end up sending loads of revisions to DataCite for
                # minor edits. Load the current version
                orig_metadata_dict = build_metadata(orig_pkg_dict, doi)
                # Check if the two dictionaries are the same
                if cmp(orig_metadata_dict, metadata_dict) != 0:
                    # Not the same, so we want to update the metadata
                    update_doi(package_id, **metadata_dict)
                    toolkit.h.flash_success(u'DataCite DOI metadata updated')

                    # TODO: If editing a dataset older than 5 days, create DOI revision

            # New DOI - publish to datacite
            else:
                toolkit.h.flash_success(u'DataCite DOI created')
                publish_doi(package_id, **metadata_dict)

        return pkg_dict
Code Example #8
File: plugin.py  Project: LukeTully/ckanext-doi
    def after_update(self, context, pkg_dict):
        """
        Dataset has been created / updated
        Check status of the dataset to determine if we should publish DOI to datacite network

        @param pkg_dict:
        @return: pkg_dict
        """

        package_id = pkg_dict['id']

        # Load the original package, so we can determine if user has changed any fields
        orig_pkg_dict = get_action('package_show')(context, {'id': package_id})

        # Metadata created isn't populated in pkg_dict - so copy from the original
        pkg_dict['metadata_created'] = orig_pkg_dict['metadata_created']

        # Load the local DOI
        doi = get_doi(package_id)

        # Auto-create overwrites anything in the identifier field
        # A DOI or identifier might already exist, in which case that DOI will be used
        if not doi:
            if 'doi_auto_create' in pkg_dict:

                # Overwrite any existing identifier with a newly minted DOI
                # (assign the result so the new DOI is used below)
                doi = create_unique_identifier(package_id)
                # Remove the auto-create field from the dataset pkg
                pkg_dict.pop('doi_auto_create')
            else:
                return pkg_dict

        # TODO: Handle manual input again
        # Is this active and public? If so we need to make sure we have an active DOI
        if pkg_dict.get('state', 'active') == 'active' and not pkg_dict.get(
                'private', False):

            # Build the metadata dict to pass to DataCite service
            metadata_dict = build_metadata(pkg_dict, doi)

            # Perform some basic checks against the data - we require at the very least
            # title and author fields - they're mandatory in the DataCite Schema
            # This will only be an issue if another plugin has removed a mandatory field
            validate_metadata(metadata_dict)

            # Is this an existing DOI? Update it
            if doi.published:

                # Before updating, check whether any of the metadata has changed; otherwise
                # we end up sending loads of revisions to DataCite for minor edits
                # Load the current version
                orig_metadata_dict = build_metadata(orig_pkg_dict, doi)
                # Check if the two dictionaries are the same
                if cmp(orig_metadata_dict, metadata_dict) != 0:
                    # Not the same, so we want to update the metadata
                    update_doi(package_id, **metadata_dict)
                    h.flash_success('DataCite DOI metadata updated')
                    # TODO: If editing a dataset older than 5 days, create DOI revision

            # New DOI - publish to datacite
            else:
                h.flash_success('DataCite DOI created')
                publish_doi(package_id, **metadata_dict)

        return pkg_dict
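
Example #8 only mints a DOI when the incoming dataset dict carries a doi_auto_create flag. A hypothetical call that would exercise that branch is sketched below; doi_auto_create is a custom field assumed by this fork, not a core CKAN key, and the remaining dataset fields required by package_update are elided.

# Hypothetical usage sketch; adjust fields to whatever your schema requires.
from ckan.plugins import toolkit

toolkit.get_action('package_update')(context, {
    'id': 'my-dataset',
    'doi_auto_create': True,
    # ... remaining mandatory dataset fields ...
})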
Code Example #9
File: test_doi.py  Project: qlands/ckanext-doi
    def test_publish_to_datacite(self, mock_request):
        doi = doi_lib.get_or_create_doi(self.package_dict[u'id'])
        metadata_dict = doi_lib.build_metadata(self.package_dict, doi)
        doi_lib.validate_metadata(metadata_dict)

        doi_lib.publish_doi(self.package_dict[u'id'], **metadata_dict)
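
Example #9 receives a mock_request argument, which implies the HTTP call to DataCite is patched out in the test setup. A hedged sketch of how such a test could be wired up with unittest.mock (or the mock backport on Python 2); the patch target is an assumption, and the real suite may patch a different function or use a decorator/fixture elsewhere.

# Illustrative sketch of mocking the outgoing request while publishing a DOI.
from unittest import mock

with mock.patch('requests.request') as mock_request:
    mock_request.return_value = mock.Mock(status_code=201, text='OK')
    doi = doi_lib.get_or_create_doi(package_id)
    metadata_dict = doi_lib.build_metadata(package_dict, doi)
    doi_lib.validate_metadata(metadata_dict)
    doi_lib.publish_doi(package_id, **metadata_dict)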
Code Example #10
File: plugin.py  Project: okfn/ckanext-doi
    def after_update(self, context, pkg_dict):
        '''
        Dataset has been created / updated. Check the status of the dataset to
        determine whether we should publish the DOI.

        @param context:
        @param pkg_dict:
        @return: pkg_dict
        '''

        package_id = pkg_dict['id']
        # Load the local DOI
        doi = get_doi(package_id)

        # If we're not auto managing the doi, but there is a DOI object
        # associated with the package, delete it.
        if not pkg_dict.get('auto_doi_identifier') and doi:
            delete_doi(package_id)
            doi = None

        # We might be short-circuiting the after_update
        if context.get('no_after_update') \
           or not pkg_dict.get('auto_doi_identifier'):
            return pkg_dict

        # If we don't have a DOI, create one.
        # This could happen if the DOI module is enabled after a dataset
        # has been created, or if a user has added their own on dataset
        # creation, but subsequently deleted it.
        if not doi:
            prefix = pkg_dict.get('doi_prefix')
            doi = create_unique_identifier(package_id, prefix)

        # Ensure doi.identifier and pkg_dict['doi_identifier'] are the same
        if doi.identifier != pkg_dict['doi_identifier']:
            self._update_pkg_doi(context, package_id, doi.identifier)

        # Is this active and public? If so we need to make sure we have an
        # active DOI
        if pkg_dict.get('state', 'active') == 'active' \
           and not pkg_dict.get('private', False):

            # Load the original package, so we can determine if user has
            # changed any fields
            orig_pkg_dict = get_action('package_show')(context,
                                                       {'id': package_id})

            # Metadata created isn't populated in pkg_dict - so copy from the
            # original
            pkg_dict['metadata_created'] = orig_pkg_dict['metadata_created']

            # Build the metadata dict to pass to DataCite service
            metadata_dict = build_metadata(pkg_dict, doi)

            # Perform some basic checks against the data - we require at the
            # very least title and author fields - they're mandatory in the
            # DataCite Schema. This will only be an issue if another plugin has
            # removed a mandatory field
            validate_metadata(metadata_dict)

            # Is this an existing DOI? Update it
            if doi.published:
                # Before updating, check whether any of the metadata has
                # changed; otherwise we end up sending loads of revisions to
                # DataCite for minor edits. Load the current version
                orig_metadata_dict = build_metadata(orig_pkg_dict, doi)
                # Check if the two dictionaries are the same
                if cmp(orig_metadata_dict, metadata_dict) != 0:
                    # Not the same, so we want to update the metadata
                    update_doi(package_id, **metadata_dict)

                # TODO: If editing a dataset older than 5 days, create DOI
                # revision

            # New DOI - publish to datacite
            else:
                publish_doi(package_id, **metadata_dict)

        return pkg_dict
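
All of the after_update variants above compare the old and new metadata with cmp(), which only exists in Python 2. On Python 3 the same change check can be written with plain dict equality, for example:

# Python 3 equivalent of "if cmp(orig_metadata_dict, metadata_dict) != 0:"
if orig_metadata_dict != metadata_dict:
    update_doi(package_id, **metadata_dict)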