Example #1
def on_node_updated(node_id,
                    user_id,
                    first_save,
                    saved_fields,
                    request_headers=None):
    # WARNING: Only perform Read-Only operations in an asynchronous task, until Repeatable Read/Serializable
    # transactions are implemented in View and Task application layers.
    AbstractNode = apps.get_model('osf.AbstractNode')
    node = AbstractNode.load(node_id)

    if node.is_collection or node.archiving or node.is_quickfiles:
        return

    need_update = bool(node.SEARCH_UPDATE_FIELDS.intersection(saved_fields))
    # due to async nature of call this can issue a search update for a new record (acceptable trade-off)
    if bool({'spam_status', 'is_deleted',
             'deleted'}.intersection(saved_fields)):
        need_update = True
    elif not node.is_public and 'is_public' not in saved_fields:
        need_update = False

    if need_update:
        node.update_search()
        if settings.SHARE_ENABLED:
            update_share(node)
        update_collecting_metadata(node, saved_fields)

    if node.get_identifier_value('doi') and bool(
            node.IDENTIFIER_UPDATE_FIELDS.intersection(saved_fields)):
        node.request_identifier_update(category='doi')
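For orientation: `on_node_updated` receives `saved_fields` from whatever save hook dispatches it. Below is a minimal, illustrative sketch of how such a handler could be wired to Django's `post_save` signal and queued asynchronously; the receiver name, the Celery dispatch, and `user_id=None` are assumptions, not OSF's actual wiring.

from django.db.models.signals import post_save
from django.dispatch import receiver


@receiver(post_save, sender='osf.AbstractNode')  # lazy string sender reference
def queue_node_updated(sender, instance, created, update_fields, **kwargs):
    # When update_fields is None (a plain .save()), treat every field as changed.
    saved_fields = set(update_fields or (f.name for f in instance._meta.fields))
    # Hand off to the asynchronous task so the request/transaction is not blocked;
    # this assumes on_node_updated is registered as a Celery task.
    on_node_updated.apply_async(kwargs={
        'node_id': instance._id,
        'user_id': None,  # or the acting user's id, if it is known here
        'first_save': created,
        'saved_fields': list(saved_fields),
    })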
Example #2
    def delete(self, request, *args, **kwargs):
        node = self.get_object()
        update_share(node)
        update_admin_log(user_id=self.request.user.id,
                         object_id=node._id,
                         object_repr='Node',
                         message='Node Reindexed (SHARE): {}'.format(node._id),
                         action_flag=REINDEX_SHARE)
        if isinstance(node, (Node, Registration)):
            return redirect(reverse_node(self.kwargs.get('guid')))
Example #3
    def post(self, request, *args, **kwargs):
        preprint = self.get_object()
        if settings.SHARE_ENABLED:
            update_share(preprint)
        update_admin_log(user_id=self.request.user.id,
                         object_id=preprint._id,
                         object_repr='Preprint',
                         message=f'Preprint Reindexed (SHARE): {preprint._id}',
                         action_flag=REINDEX_SHARE)
        return redirect(self.get_success_url())
Example #4
    def test_no_call_async_update_on_400_failure(self, mock_share, preprint):
        mock_share.replace(responses.POST, f'{settings.SHARE_URL}api/v2/normalizeddata/', status=400)

        mock_share._calls.reset()  # reset after factory calls
        update_share(preprint)

        assert len(mock_share.calls) == 1
        data = json.loads(mock_share.calls[0].request.body.decode())
        graph = data['data']['attributes']['data']['@graph']
        data = next(data for data in graph if data['@type'] == 'preprint')
        assert data['title'] == preprint.title
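The test in Example #4 (and Example #13 further down) depends on a `mock_share` fixture that intercepts the SHARE HTTP endpoint. A minimal sketch of what such a fixture can look like, assuming pytest and the `responses` library and that `settings.SHARE_URL` is configured; the stubbed response body and status are illustrative, not the exact OSF fixture:

import pytest
import responses
from django.conf import settings  # or the project's own settings module


@pytest.fixture
def mock_share():
    # Intercept outgoing requests to the SHARE normalizeddata endpoint so tests
    # can inspect mock_share.calls instead of hitting a live service.
    with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
        rsps.add(
            responses.POST,
            f'{settings.SHARE_URL}api/v2/normalizeddata/',
            json={'data': {'id': '1'}},
            status=202,
        )
        yield rsps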
Example #5
    def post(self, request, *args, **kwargs):
        node = self.get_object()
        if settings.SHARE_ENABLED:
            update_share(node)

        update_admin_log(user_id=self.request.user.id,
                         object_id=node._id,
                         object_repr='Node',
                         message=f'Node Reindexed (SHARE): {node._id}',
                         action_flag=REINDEX_SHARE)
        return redirect(self.get_success_url())
Example #6
    def delete(self, request, *args, **kwargs):
        preprint = self.get_object()
        if settings.SHARE_ENABLED:
            update_share(preprint)
        update_admin_log(user_id=self.request.user.id,
                         object_id=preprint._id,
                         object_repr='Preprint',
                         message='Preprint Reindexed (SHARE): {}'.format(
                             preprint._id),
                         action_flag=REINDEX_SHARE)
        return redirect(reverse_preprint(self.kwargs.get('guid')))
Example #7
def reindex_provider(provider):
    preprints = Preprint.objects.filter(provider=provider)
    if preprints:
        logger.info('Sending {} preprints to SHARE...'.format(provider.preprints.count()))
        for preprint in preprints:
            update_share(preprint)

    nodes = AbstractNode.objects.filter(provider=provider)
    if nodes:
        logger.info('Sending {} AbstractNodes to SHARE...'.format(AbstractNode.objects.filter(provider=provider).count()))
        for abstract_node in nodes:
            update_share(abstract_node)
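A function like `reindex_provider` is usually driven from a Django management command. A sketch of such a wrapper, assuming `PreprintProvider` is importable from `osf.models` and is looked up by its `_id`; both are assumptions for illustration:

from django.core.management.base import BaseCommand
from osf.models import PreprintProvider  # assumed import path


class Command(BaseCommand):
    help = 'Resend all preprints and nodes for one provider to SHARE'

    def add_arguments(self, parser):
        parser.add_argument('provider_id', type=str, help='_id of the provider')

    def handle(self, *args, **options):
        provider = PreprintProvider.objects.get(_id=options['provider_id'])
        reindex_provider(provider)  # the function shown above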
Example #8
    def _on_complete(self, event_data):
        super()._on_complete(event_data)
        NodeLog = apps.get_model('osf.NodeLog')

        self.date_retracted = timezone.now()
        self.save()

        parent_registration = self.target_registration
        parent_registration.registered_from.add_log(
            action=NodeLog.RETRACTION_APPROVED,
            params={
                'node': parent_registration.registered_from._id,
                'retraction_id': self._id,
                'registration': parent_registration._id
            },
            auth=Auth(self.initiated_by),
        )

        # TODO: Move this into the registration to be re-used in Forced Withdrawal
        # Remove any embargoes associated with the registration
        if parent_registration.embargo_end_date or parent_registration.is_pending_embargo:
            # Alter embargo state to make sure registration doesn't accidentally get published
            parent_registration.embargo.state = self.REJECTED
            parent_registration.embargo.approval_stage = (
                SanctionStates.MODERATOR_REJECTED
                if self.is_moderated else SanctionStates.REJECTED)

            parent_registration.registered_from.add_log(
                action=NodeLog.EMBARGO_CANCELLED,
                params={
                    'node': parent_registration.registered_from._id,
                    'registration': parent_registration._id,
                    'embargo_id': parent_registration.embargo._id,
                },
                auth=Auth(self.initiated_by),
            )
            parent_registration.embargo.save()

        # Ensure retracted registration is public
        # Pass auth=None because the registration initiator may not be
        # an admin on components (component admins had the opportunity
        # to disapprove the retraction by this point)
        for node in parent_registration.node_and_primary_descendants():
            node.set_privacy('public', auth=None, save=True, log=False)
            node.update_search()
        # force a save before sending data to share or retraction will not be updated
        self.save()

        if osf_settings.SHARE_ENABLED:
            update_share(parent_registration)
Example #9
def migrate(dry_run):
    assert settings.SHARE_URL, 'SHARE_URL must be set to migrate.'
    assert settings.SHARE_API_TOKEN, 'SHARE_API_TOKEN must be set to migrate.'
    registrations = Registration.objects.filter(is_deleted=False,
                                                is_public=True)
    registrations_count = registrations.count()
    count = 0

    logger.info(
        'Preparing to migrate {} registrations.'.format(registrations_count))
    for registration in registrations.iterator():
        count += 1
        logger.info('{}/{} - {}'.format(count, registrations_count,
                                        registration._id))
        if not dry_run:
            update_share(registration)
        logger.info('Registration {} was sent to SHARE.'.format(
            registration._id))
Example #10
def migrate(registrations):
    assert settings.SHARE_URL, 'SHARE_URL must be set to migrate.'
    assert settings.SHARE_API_TOKEN, 'SHARE_API_TOKEN must be set to migrate.'
    registrations_count = len(registrations)

    count = 0

    logger.info(
        'Preparing to migrate {} registrations.'.format(registrations_count))
    for registration_id in registrations:
        count += 1
        logger.info('{}/{} - {}'.format(count, registrations_count,
                                        registration_id))
        registration = AbstractNode.load(registration_id)
        assert registration.type == 'osf.registration'
        update_share(registration)
        logger.info(
            'Registration {} was sent to SHARE.'.format(registration_id))
Example #11
    def _on_complete(self, user):
        Registration = apps.get_model('osf.Registration')
        NodeLog = apps.get_model('osf.NodeLog')

        self.date_retracted = timezone.now()
        self.save()

        parent_registration = Registration.objects.get(retraction=self)
        parent_registration.registered_from.add_log(
            action=NodeLog.RETRACTION_APPROVED,
            params={
                'node': parent_registration.registered_from._id,
                'retraction_id': self._id,
                'registration': parent_registration._id
            },
            auth=Auth(self.initiated_by),
        )
        # Remove any embargoes associated with the registration
        if parent_registration.embargo_end_date or parent_registration.is_pending_embargo:
            parent_registration.embargo.state = self.REJECTED
            parent_registration.registered_from.add_log(
                action=NodeLog.EMBARGO_CANCELLED,
                params={
                    'node': parent_registration.registered_from._id,
                    'registration': parent_registration._id,
                    'embargo_id': parent_registration.embargo._id,
                },
                auth=Auth(self.initiated_by),
            )
            parent_registration.embargo.save()
        # Ensure retracted registration is public
        # Pass auth=None because the registration initiator may not be
        # an admin on components (component admins had the opportunity
        # to disapprove the retraction by this point)
        for node in parent_registration.node_and_primary_descendants():
            node.set_privacy('public', auth=None, save=True, log=False)
            node.update_search()
        # force a save before sending data to share or retraction will not be updated
        self.save()

        if osf_settings.SHARE_ENABLED:
            update_share(parent_registration)
Example #12
def remove_search_index(dry_run=True):
    tag_query = Q()
    title_query = Q()
    for tag in DO_NOT_INDEX_LIST['tags']:
        tag_query |= Q(tags__name=tag)

    for title in DO_NOT_INDEX_LIST['titles']:
        title_query |= Q(title__contains=title)

    increment = 20
    nodes = paginated(AbstractNode,
                      query=Q(is_public=True) & (tag_query | title_query),
                      increment=increment,
                      each=True)
    if dry_run:
        logger.warning('Dry run mode.')
        for node in nodes:
            logger.info(
                'Removing {} with title \'{}\' from search index and SHARE.'.
                format(node._id, node.title))
    else:
        for node in nodes:
            update_node(node, bulk=False, async_update=True)  # 'async' is reserved in Python 3.7+; newer OSF code names this kwarg async_update
            update_share(node)
Example #13
    def test_delete_from_share(self, mock_share):
        preprint = PreprintFactory()
        update_share(preprint)

        data = json.loads(mock_share.calls[-1].request.body.decode())
        graph = data['data']['attributes']['data']['@graph']
        share_preprint = next(n for n in graph if n['@type'] == 'preprint')
        assert not share_preprint['is_deleted']

        preprint.date_withdrawn = datetime.now()
        update_share(preprint)

        data = json.loads(mock_share.calls[-1].request.body.decode())
        graph = data['data']['attributes']['data']['@graph']
        share_preprint = next(n for n in graph if n['@type'] == 'preprint')
        assert not share_preprint['is_deleted']

        preprint.spam_status = SpamStatus.SPAM
        update_share(preprint)

        data = json.loads(mock_share.calls[-1].request.body.decode())
        graph = data['data']['attributes']['data']['@graph']
        share_preprint = next(n for n in graph if n['@type'] == 'preprint')
        assert share_preprint['is_deleted']
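The payload parsing repeated in Examples #4 and #13 can be factored into a small helper. A sketch that assumes the same request-body shape those tests already rely on:

import json


def latest_share_graph_node(mock_share, node_type):
    # Decode the most recent request sent to SHARE and return the @graph entry
    # of the requested @type (e.g. 'preprint').
    payload = json.loads(mock_share.calls[-1].request.body.decode())
    graph = payload['data']['attributes']['data']['@graph']
    return next(entry for entry in graph if entry['@type'] == node_type)

With this helper, the assertions above reduce to, for example, assert not latest_share_graph_node(mock_share, 'preprint')['is_deleted'].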