Esempio n. 1
0
def ban_object_from_cache(sender, instance, fields_changed, cached_data):
    """Signal handler: schedule a post-commit cache ban for *instance*.

    Instances that do not expose an ``absolute_api_v2_url`` attribute (or
    expose it as None) are silently ignored.
    """
    api_url = getattr(instance, 'absolute_api_v2_url', None)
    if api_url is None:
        return
    enqueue_postcommit_task(partial(ban_url, instance, fields_changed))
Esempio n. 2
0
def _update_comments_timestamp(auth,
                               node,
                               page=Comment.OVERVIEW,
                               root_id=None):
    """Record when *auth.user* last viewed comments on *node*.

    Returns ``{root_id: iso_timestamp}`` for contributors, or an empty dict
    when the user is not a contributor of the node.
    """
    if node.is_contributor(auth.user):
        # Invalidate the node's cached API representation after commit.
        enqueue_postcommit_task(ban_url, (node, ), {},
                                celery=False,
                                once_per_request=True)
        if root_id is not None:
            guid_obj = Guid.load(root_id)
            if guid_obj is not None:
                # Also ban the cache for the thread root's referent.
                enqueue_postcommit_task(ban_url, (guid_obj.referent, ), {},
                                        celery=False,
                                        once_per_request=True)

        # update node timestamp: the overview page stamps the node itself
        # rather than an individual comment thread.
        if page == Comment.OVERVIEW:
            root_id = node._id
        # NOTE(review): datetime.utcnow() yields a naive datetime; other
        # variants of this helper use timezone.now() — confirm which the
        # comments_viewed_timestamp field expects.
        auth.user.comments_viewed_timestamp[root_id] = datetime.utcnow()
        auth.user.save()
        return {
            root_id: auth.user.comments_viewed_timestamp[root_id].isoformat()
        }
    else:
        return {}
Esempio n. 3
0
 def save_changes(self, ev):
     """Apply the side effects of a reviews-state transition: publish the
     preprint when it enters a public reviews state, unpublish it when it
     leaves one, then persist both the preprint and its node.

     :param ev: the state-machine event (not used by this handler).
     :raises ValueError: when publishing without a valid preprint file on
         this node, without a provider, or without at least one subject.
     """
     node = self.machineable.node
     node._has_abandoned_preprint = False
     # Prefer the triggering action's timestamp so the publish date matches
     # the recorded action; fall back to "now" when there is no action.
     now = self.action.created if self.action is not None else timezone.now(
     )
     should_publish = self.machineable.in_public_reviews_state
     if should_publish and not self.machineable.is_published:
         # The preprint's primary file must live on this node.
         if not (self.machineable.node.preprint_file
                 and self.machineable.node.preprint_file.node
                 == self.machineable.node):
             raise ValueError(
                 'Preprint node is not a valid preprint; cannot publish.')
         if not self.machineable.provider:
             raise ValueError(
                 'Preprint provider not specified; cannot publish.')
         if not self.machineable.subjects.exists():
             raise ValueError(
                 'Preprint must have at least one subject to be published.')
         self.machineable.date_published = now
         self.machineable.is_published = True
         # Mint identifiers (e.g. DOIs) after commit, on celery.
         enqueue_postcommit_task(get_and_set_preprint_identifiers, (),
                                 {'preprint_id': self.machineable._id},
                                 celery=True)
     elif not should_publish and self.machineable.is_published:
         self.machineable.is_published = False
     self.machineable.save()
     node.save()
def _update_comments_timestamp(auth,
                               node,
                               page=Comment.OVERVIEW,
                               root_id=None):
    """Stamp the time *auth.user* viewed comments on *node*.

    Returns ``{root_id: iso_timestamp}`` for contributors and group
    members; everyone else gets an empty dict.
    """
    user = auth.user
    if not node.is_contributor_or_group_member(user):
        return {}

    enqueue_postcommit_task(ban_url, (node, ), {},
                            celery=False,
                            once_per_request=True)
    if root_id is not None and Guid.load(root_id) is not None:
        # FIXME: Doesn't work because we're not using Vanish anymore
        # enqueue_postcommit_task(ban_url, (self.get_node(),), {}, celery=False, once_per_request=True)
        pass

    # The overview page stamps the node itself rather than a single thread.
    if page == Comment.OVERVIEW:
        root_id = node._id
    viewed_at = timezone.now()
    user.comments_viewed_timestamp[root_id] = viewed_at
    user.save()
    return {root_id: viewed_at.isoformat()}
Esempio n. 5
0
def update_or_enqueue_on_preprint_updated(preprint_id,
                                          update_share=True,
                                          share_type=None,
                                          old_subjects=None,
                                          saved_fields=None):
    """Coalesce ``on_preprint_updated`` work for one preprint.

    If an ``on_preprint_updated`` task for *preprint_id* is already waiting
    in the postcommit queue, merge the new arguments into it instead of
    enqueuing a duplicate; otherwise enqueue a fresh celery task.
    """
    task = get_task_from_postcommit_queue(
        'website.preprints.tasks.on_preprint_updated',
        predicate=lambda task: task.kwargs['preprint_id'] == preprint_id)
    if task:
        old_subjects = old_subjects or []
        task_subjects = task.kwargs['old_subjects'] or []
        # A flag set by either call wins; subject lists are concatenated.
        task.kwargs[
            'update_share'] = update_share or task.kwargs['update_share']
        task.kwargs['share_type'] = share_type or task.kwargs['share_type']
        task.kwargs['old_subjects'] = old_subjects + task_subjects
        # Guard both sides against None: *saved_fields* defaults to None and
        # the queued task may itself have been enqueued with
        # saved_fields=None, in which case set(...)/union(...) would raise
        # TypeError.
        task.kwargs['saved_fields'] = list(
            set(task.kwargs['saved_fields'] or []).union(saved_fields or []))
    else:
        enqueue_postcommit_task(on_preprint_updated, (), {
            'preprint_id': preprint_id,
            'old_subjects': old_subjects,
            'update_share': update_share,
            'share_type': share_type,
            'saved_fields': saved_fields
        },
                                celery=True)
def checkin_files_by_user(node, user):
    """Check in all of *user*'s files on *node*.

    Fired when a contributor or group member is removed; the actual work
    runs as a post-commit celery task.
    """
    task_args = (node._id, user._id)
    enqueue_postcommit_task(checkin_files_task, task_args, {}, celery=True)
Esempio n. 7
0
    def save(self, *args, **kwargs):
        """Save the preprint and schedule ``on_preprint_updated`` when the
        published flag changed or the preprint is published."""
        # True only before the model has ever been persisted (no pk yet).
        first_save = not bool(self.pk)
        # Dirty-field info; falsy result is normalized to an empty list.
        saved_fields = self.get_dirty_fields() or []
        # Callers may pass the pre-save subject list through kwargs so the
        # async task can diff subjects.
        old_subjects = kwargs.pop('old_subjects', [])
        ret = super(PreprintService, self).save(*args, **kwargs)

        if (not first_save and 'is_published' in saved_fields) or self.is_published:
            enqueue_postcommit_task(on_preprint_updated, (self._id,), {'old_subjects': old_subjects}, celery=True)
        return ret
Esempio n. 8
0
    def save(self, *args, **kwargs):
        """Persist the preprint; notify listeners post-commit when the
        published flag changed or the preprint is published."""
        is_new = not self.pk
        dirty = self.get_dirty_fields() or []
        previous_subjects = kwargs.pop('old_subjects', [])
        result = super(PreprintService, self).save(*args, **kwargs)

        publish_flag_changed = not is_new and 'is_published' in dirty
        if publish_flag_changed or self.is_published:
            enqueue_postcommit_task(on_preprint_updated, (self._id,),
                                    {'old_subjects': previous_subjects},
                                    celery=True)
        return result
Esempio n. 9
0
def update_storage_usage(target):
    """Refresh the cached storage usage for *target* via a post-commit
    celery task.

    No-op when the cache is disabled, or when *target* is a preprint or a
    quickfiles node.
    """
    Preprint = apps.get_model('osf.preprint')

    if not settings.ENABLE_STORAGE_USAGE_CACHE:
        return
    if isinstance(target, Preprint) or target.is_quickfiles:
        return
    enqueue_postcommit_task(update_storage_usage_cache,
                            (target.id, target._id), {}, celery=True)
Esempio n. 10
0
def _update_comments_timestamp(auth, node, page=Comment.OVERVIEW, root_id=None):
    """Record when *auth.user* last viewed comments on *node* and return
    ``{root_id: iso_timestamp}``; non-contributors get an empty dict."""
    if node.is_contributor(auth.user):
        # Ban cached API representations of the node (and of the thread
        # root's referent, if any) once per request.
        enqueue_postcommit_task(ban_url, (node, ), {}, celery=False, once_per_request=True)
        if root_id is not None:
            guid_obj = Guid.load(root_id)
            if guid_obj is not None:
                enqueue_postcommit_task(ban_url, (guid_obj.referent, ), {}, celery=False, once_per_request=True)

        # update node timestamp: the overview page stamps the node itself.
        if page == Comment.OVERVIEW:
            root_id = node._id
        # NOTE(review): datetime.utcnow() yields a naive datetime; other
        # variants of this helper use timezone.now() — confirm which the
        # field expects.
        auth.user.comments_viewed_timestamp[root_id] = datetime.utcnow()
        auth.user.save()
        return {root_id: auth.user.comments_viewed_timestamp[root_id].isoformat()}
    else:
        return {}
Esempio n. 11
0
    def set_published(self, published, auth, save=False):
        """Publish this preprint (unpublishing is not allowed).

        :param published: desired state; False on an already-published
            preprint raises ValueError.
        :param auth: Auth of the acting user; must be an admin on the node.
        :param save: when True, persist the node and the preprint.
        :raises PermissionsError: if *auth.user* is not a node admin.
        :raises ValueError: on unpublish attempts, or when the preprint
            lacks a valid file on this node, a provider, or any subject.
        """
        if not self.node.has_permission(auth.user, ADMIN):
            raise PermissionsError('Only admins can publish a preprint.')

        if self.is_published and not published:
            raise ValueError('Cannot unpublish preprint.')

        self.is_published = published

        if published:
            # The primary preprint file must live on this node.
            if not (self.node.preprint_file
                    and self.node.preprint_file.node == self.node):
                raise ValueError(
                    'Preprint node is not a valid preprint; cannot publish.')
            if not self.provider:
                raise ValueError(
                    'Preprint provider not specified; cannot publish.')
            if not self.subjects.exists():
                raise ValueError(
                    'Preprint must have at least one subject to be published.')
            self.date_published = timezone.now()
            self.node._has_abandoned_preprint = False

            # In case this provider is ever set up to use a reviews workflow,
            # put this preprint in a sensible state.
            self.reviews_state = States.ACCEPTED.value
            self.date_last_transitioned = self.date_published

            self.node.add_log(
                action=NodeLog.PREPRINT_INITIATED,
                params={'preprint': self._id},
                auth=auth,
                save=False,
            )

            # Publishing forces the underlying node public.
            if not self.node.is_public:
                self.node.set_privacy(self.node.PUBLIC, auth=None, log=True)

            # This should be called after all fields for EZID metadata have
            # been set.
            enqueue_postcommit_task(get_and_set_preprint_identifiers, (),
                                    {'preprint': self},
                                    celery=True)

            self._send_preprint_confirmation(auth)

        if save:
            self.node.save()
            self.save()
Esempio n. 12
0
    def set_published(self, published, auth, save=False):
        """Publish this preprint (unpublishing is not allowed).

        :param published: desired state; False on an already-published
            preprint raises ValueError.
        :param auth: Auth of the acting user; must be an admin on the node.
        :param save: when True, persist the node and the preprint.
        :raises PermissionsError: if *auth.user* is not a node admin.
        :raises ValueError: on unpublish attempts, or when the preprint
            lacks a valid file on this node, a provider, or any subject.
        """
        if not self.node.has_permission(auth.user, ADMIN):
            raise PermissionsError('Only admins can publish a preprint.')

        if self.is_published and not published:
            raise ValueError('Cannot unpublish preprint.')

        self.is_published = published

        if published:
            # The primary preprint file must live on this node.
            if not (self.node.preprint_file and self.node.preprint_file.node == self.node):
                raise ValueError('Preprint node is not a valid preprint; cannot publish.')
            if not self.provider:
                raise ValueError('Preprint provider not specified; cannot publish.')
            if not self.subjects.exists():
                raise ValueError('Preprint must have at least one subject to be published.')
            self.date_published = timezone.now()
            self.node._has_abandoned_preprint = False

            # In case this provider is ever set up to use a reviews workflow,
            # put this preprint in a sensible state.
            self.machine_state = DefaultStates.ACCEPTED.value
            self.date_last_transitioned = self.date_published

            self.node.add_log(
                action=NodeLog.PREPRINT_INITIATED,
                params={
                    'preprint': self._id
                },
                auth=auth,
                save=False,
            )

            # Publishing forces the underlying node public.
            if not self.node.is_public:
                self.node.set_privacy(
                    self.node.PUBLIC,
                    auth=None,
                    log=True
                )

            # This should be called after all fields for EZID metadata have
            # been set.
            enqueue_postcommit_task(get_and_set_preprint_identifiers, (), {'preprint_id': self._id}, celery=True)

            self._send_preprint_confirmation(auth)

        if save:
            self.node.save()
            self.save()
Esempio n. 13
0
def _update_comments_timestamp(auth, node, page=Comment.OVERVIEW, root_id=None):
    """Record the moment *auth.user* viewed comments on *node* and return
    ``{root_id: iso_timestamp}``; non-contributors get an empty dict."""
    if not node.is_contributor(auth.user):
        return {}

    enqueue_postcommit_task(ban_url, (node, ), {}, celery=False, once_per_request=True)
    if root_id is not None:
        guid_obj = Guid.load(root_id)
        if guid_obj is not None:
            # FIXME: Doesn't work because we're not using Vanish anymore
            # enqueue_postcommit_task(ban_url, (self.get_node(),), {}, celery=False, once_per_request=True)
            pass

    # The overview page stamps the node itself rather than a single thread.
    if page == Comment.OVERVIEW:
        root_id = node._id
    viewed_at = timezone.now()
    auth.user.comments_viewed_timestamp[root_id] = viewed_at
    auth.user.save()
    return {root_id: viewed_at.isoformat()}
Esempio n. 14
0
 def save_changes(self, ev):
     """Persist the outcome of a reviews-state transition: publish the
     preprint when it enters a public reviews state, unpublish it when it
     leaves one, then save both the preprint and its node."""
     preprint = self.machineable
     node = preprint.node
     node._has_abandoned_preprint = False
     # Prefer the triggering action's timestamp so the publish date matches
     # the recorded action.
     timestamp = timezone.now() if self.action is None else self.action.created
     wants_publish = preprint.in_public_reviews_state
     if wants_publish and not preprint.is_published:
         primary_file = preprint.node.preprint_file
         if not (primary_file and primary_file.node == preprint.node):
             raise ValueError('Preprint node is not a valid preprint; cannot publish.')
         if not preprint.provider:
             raise ValueError('Preprint provider not specified; cannot publish.')
         if not preprint.subjects.exists():
             raise ValueError('Preprint must have at least one subject to be published.')
         preprint.date_published = timestamp
         preprint.is_published = True
         enqueue_postcommit_task(get_and_set_preprint_identifiers, (), {'preprint_id': preprint._id}, celery=True)
     elif preprint.is_published and not wants_publish:
         preprint.is_published = False
     preprint.save()
     node.save()
Esempio n. 15
0
def update_or_enqueue_on_preprint_updated(preprint_id, update_share=True, share_type=None, old_subjects=None, saved_fields=None):
    """Coalesce ``on_preprint_updated`` work for one preprint.

    If a matching task is already in the postcommit queue, merge the new
    arguments into it (OR the flags, concatenate subject lists, union the
    saved-field mappings); otherwise enqueue a new celery task.
    """
    task = get_task_from_postcommit_queue(
        'website.preprints.tasks.on_preprint_updated',
        predicate=lambda task: task.kwargs['preprint_id'] == preprint_id
    )
    if task:
        # Normalize None arguments/kwargs to empty containers before merging.
        old_subjects = old_subjects or []
        task_subjects = task.kwargs['old_subjects'] or []
        saved_fields = saved_fields or {}
        task_saved_fields = task.kwargs['saved_fields'] or {}
        # New values win over those already queued for the same keys.
        task_saved_fields.update(saved_fields)
        task.kwargs['update_share'] = update_share or task.kwargs['update_share']
        task.kwargs['share_type'] = share_type or task.kwargs['share_type']
        task.kwargs['old_subjects'] = old_subjects + task_subjects
        # Keep the task's original value when the merged mapping is empty.
        task.kwargs['saved_fields'] = task_saved_fields or task.kwargs['saved_fields']
    else:
        enqueue_postcommit_task(
            on_preprint_updated,
            (),
            {'preprint_id': preprint_id, 'old_subjects': old_subjects, 'update_share': update_share, 'share_type': share_type, 'saved_fields': saved_fields},
            celery=True
        )
Esempio n. 16
0
def update_ia_metadata(node, data=None):
    """
    This debounces/throttles requests by grabbing a pending task and overriding it instead of making a new one every
    pre-commit m2m change.

    IA wants us to brand our specific osf metadata with a `osf_` prefix. So we are following IA_MAPPED_NAMES.

    :param node: the registration whose IA metadata should be synced.
    :param data: optional pre-built metadata dict; when falsy it is derived
        from the node's dirty fields intersected with SYNCED_WITH_IA.
    """
    if settings.IA_ARCHIVE_ENABLED:

        Registration = apps.get_model('osf.registration')
        if not data:
            allowed_metadata = Registration.SYNCED_WITH_IA.intersection(
                node.get_dirty_fields().keys())
            data = {key: str(getattr(node, key)) for key in allowed_metadata}

        # Iterate over a snapshot of the keys: the loop body pops and
        # re-inserts entries, and mutating a dict while iterating its live
        # keys view is undefined behavior (re-inserted keys can be revisited
        # or a RuntimeError raised).
        for key in list(data):
            data[Registration.IA_MAPPED_NAMES.get(key, key)] = data.pop(key)

        if node.moderation_state == RegistrationModerationStates.WITHDRAWN.db_name:
            data['withdrawal_justification'] = node.retraction.justification

        if getattr(node, 'ia_url', None) and node.is_public:
            # Overwrite the args of a still-pending task for this node with
            # the same key set instead of enqueueing a duplicate.
            task = get_task_from_postcommit_queue(
                'framework.celery_tasks._update_ia_metadata',
                predicate=lambda task: task.args[0] == node._id and data.keys(
                ) == task.args[1].keys())
            if task:
                task.args = (
                    node._id,
                    data,
                )
            else:
                enqueue_postcommit_task(_update_ia_metadata, (
                    node._id,
                    data,
                ), {},
                                        celery=True)
Esempio n. 17
0
    def save(self, *args, **kwargs):
        """Spam-check, validate, and save the preprint, then schedule
        ``on_preprint_updated`` when the published flag changed or the
        preprint is published.

        :raises ValidationError: when an existing record tries to flip
            ``ever_public`` off.
        """
        # True only before the model has ever been persisted (no pk yet).
        first_save = not bool(self.pk)
        # Dirty-field info from get_dirty_fields(); falsy is normalized to
        # an empty list. Presumably maps field name -> prior value — verify
        # against the dirty-fields implementation in use.
        saved_fields = self.get_dirty_fields() or []
        old_subjects = kwargs.pop('old_subjects', [])
        if saved_fields:
            request, user_id = get_request_and_user_id()
            request_headers = {}
            if not isinstance(request, DummyRequest):
                # Only string-valued headers are forwarded to the spam check.
                request_headers = {
                    k: v
                    for k, v in get_headers_from_request(request).items()
                    if isinstance(v, basestring)
                }
            user = OSFUser.load(user_id)
            if user:
                self.check_spam(user, saved_fields, request_headers)
        # A dirty ever_public with a truthy prior value means it is being
        # cleared, which is forbidden.
        if not first_save and ('ever_public' in saved_fields and saved_fields['ever_public']):
            raise ValidationError('Cannot set "ever_public" to False')

        ret = super(PreprintService, self).save(*args, **kwargs)

        if (not first_save and 'is_published' in saved_fields) or self.is_published:
            enqueue_postcommit_task(on_preprint_updated, (self._id,), {'old_subjects': old_subjects}, celery=True)
        return ret
Esempio n. 18
0
def ban_object_from_cache(sender, instance, fields_changed, cached_data):
    """Signal handler: queue a once-per-request (non-celery) cache ban for
    any instance exposing an API v2 URL."""
    if not hasattr(instance, 'absolute_api_v2_url'):
        return
    enqueue_postcommit_task(ban_url, (instance, ), {},
                            celery=False,
                            once_per_request=True)
Esempio n. 19
0
def delete_files(node):
    """Queue a post-commit celery task that deletes *node*'s files."""
    task_args = (node._id,)
    enqueue_postcommit_task(delete_files_task, task_args, {}, celery=True)
Esempio n. 20
0
def ban_object_from_cache(sender, instance, fields_changed, cached_data):
    """Signal handler: queue a cache ban for *instance* if it exposes an
    API v2 URL."""
    if hasattr(instance, "absolute_api_v2_url"):
        # NOTE(review): this passes a single (fn, args) tuple, unlike the
        # fn, args, kwargs call shape used elsewhere — confirm this matches
        # the enqueue_postcommit_task signature in this codebase version.
        enqueue_postcommit_task((ban_url, (instance,)))
Esempio n. 21
0
def update_storage_usage(target):
    """Kick off a post-commit celery task that refreshes *target*'s cached
    storage usage; preprints and quickfiles nodes are skipped."""
    Preprint = apps.get_model('osf.preprint')

    if isinstance(target, Preprint) or target.is_quickfiles:
        return
    enqueue_postcommit_task(update_storage_usage_cache, (target._id,), {}, celery=True)
Esempio n. 22
0
def archive_to_ia(node):
    """Queue an Internet Archive archival of *node* when IA archiving is
    enabled; no-op otherwise."""
    if not settings.IA_ARCHIVE_ENABLED:
        return
    enqueue_postcommit_task(_archive_to_ia, (node._id,), {}, celery=True)
Esempio n. 23
0
def ban_object_from_cache(sender, instance, fields_changed, cached_data):
    """Signal handler: queue a post-commit cache ban for instances that
    expose an API v2 URL."""
    has_api_url = hasattr(instance, 'absolute_api_v2_url')
    if has_api_url:
        enqueue_postcommit_task(ban_url, (instance, ), {})
Esempio n. 24
0
def update_storage_usage(target):
    """Enqueue a post-commit celery task that refreshes the storage-usage
    cache for *target*; preprints and quickfiles nodes are skipped."""
    Preprint = apps.get_model('osf.preprint')

    if not isinstance(target, Preprint) and not target.is_quickfiles:
        enqueue_postcommit_task(update_storage_usage_cache, (target._id, ), {},
                                celery=True)
Esempio n. 25
0
def ban_object_from_cache(sender, instance, fields_changed, cached_data):
    """Signal handler: ban *instance*'s cached API representation once per
    request (no celery) if it exposes an API v2 URL."""
    if hasattr(instance, 'absolute_api_v2_url'):
        enqueue_postcommit_task(ban_url, (instance, ), {}, celery=False, once_per_request=True)