Code Example #1
def migrate_preprints(index, delete):
    logger.info('Migrating preprints to index: {}'.format(index))
    preprints = Preprint.objects.all()
    increment = 100
    paginator = Paginator(preprints, increment)
    for page_number in paginator.page_range:
        logger.info('Updating page {} / {}'.format(page_number, paginator.num_pages))
        Preprint.bulk_update_search(paginator.page(page_number).object_list, index=index)
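The helper above walks every Preprint in pages of 100 and bulk-indexes each page, so memory stays bounded on large tables. A minimal invocation sketch (the index name is only illustrative; the delete argument is not used in the snippet shown):

    # hypothetical one-off call from a migration script
    migrate_preprints(index='preprints_v2', delete=False)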
Code Example #2
    def create(self, validated_data):
        creator = self.context['request'].user
        provider = validated_data.pop('provider', None)
        if not provider:
            raise exceptions.ValidationError(detail='You must specify a valid provider to create a preprint.')

        title = validated_data.pop('title')
        description = validated_data.pop('description', '')
        preprint = Preprint(provider=provider, title=title, creator=creator, description=description)
        preprint.save()

        return self.update(preprint, validated_data)
Code Example #3
File: serializers.py Project: aaxelb/osf.io
    def create(self, validated_data):
        creator = self.context['request'].user
        provider = validated_data.pop('provider', None)
        if not provider:
            raise exceptions.ValidationError(detail='You must specify a valid provider to create a preprint.')

        title = validated_data.pop('title')
        description = validated_data.pop('description', '')
        preprint = Preprint(provider=provider, title=title, creator=creator, description=description)
        preprint.save()

        return self.update(preprint, validated_data)
Code Example #4
File: views.py Project: jwalz/osf.io
    def perform_create(self, serializer):
        user = self.request.user
        preprint = Preprint.load(self.kwargs['preprint_id'])
        if not preprint:
            raise NotFound
        self.check_object_permissions(self.request, preprint)
        serializer.save(submitter=user, preprint=preprint)
Code Example #5
def _add_related_claimed_tag_to_user(pid, user):
    """
    Adds claimed tag to incoming users, depending on whether the resource has related source tags
    :param pid: guid of either the node or the preprint
    :param user: the claiming user
    """
    node = AbstractNode.load(pid)
    preprint = Preprint.load(pid)
    osf_claimed_tag, created = Tag.all_tags.get_or_create(
        name=provider_claimed_tag('osf'), system=True)
    if node:
        node_source_tags = node.all_tags.filter(name__icontains='source:',
                                                system=True)
        if node_source_tags.exists():
            for tag in node_source_tags:
                claimed_tag, created = Tag.all_tags.get_or_create(
                    name=NODE_SOURCE_TAG_CLAIMED_TAG_RELATION[tag.name],
                    system=True)
                user.add_system_tag(claimed_tag)
        else:
            user.add_system_tag(osf_claimed_tag)
    elif preprint:
        provider_id = preprint.provider._id
        preprint_claimed_tag, created = Tag.all_tags.get_or_create(
            name=provider_claimed_tag(provider_id, 'preprint'), system=True)
        user.add_system_tag(preprint_claimed_tag)
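Per the docstring, the first argument is the guid of either a node or a preprint and the second is the claiming user; the function then attaches the matching provider "claimed" system tag to that user. A hedged usage sketch (guids are placeholders):

    claiming_user = OSFUser.load('user1')  # hypothetical user guid
    _add_related_claimed_tag_to_user('abcd3', claiming_user)  # 'abcd3' stands in for a node or preprint guid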
Code Example #6
    def test_create_preprint_with_supplemental_private_project(self):
        private_project_payload = build_preprint_create_payload(
            self.private_project._id,
            self.provider._id,
            attrs={
                'subjects': [
                    [
                        SubjectFactory()._id]],
            })
        res = self.app.post_json_api(
            self.url,
            private_project_payload,
            auth=self.user.auth)

        assert_equal(res.status_code, 201)
        self.private_project.reload()
        assert_false(self.private_project.is_public)

        preprint = Preprint.load(res.json['data']['id'])
        res = self.publish_preprint(preprint, self.user)
        preprint.reload()
        assert_equal(res.status_code, 200)
        self.private_project.reload()
        assert_false(self.private_project.is_public)
        assert_true(preprint.is_public)
        assert_true(preprint.is_published)
Code Example #7
    def test_file_not_osfstorage(self):
        public_project_payload = build_preprint_create_payload(
            provider_id=self.provider._id)

        res = self.app.post_json_api(
            self.url,
            public_project_payload,
            auth=self.user.auth,
            expect_errors=True)

        preprint = Preprint.load(res.json['data']['id'])
        assert_equal(res.status_code, 201)

        github_file = test_utils.create_test_preprint_file(
            preprint, self.user, 'coffee_manuscript.pdf')
        github_file.recast(GithubFile._typedmodels_type)
        github_file.save()

        update_payload = build_preprint_update_payload(preprint._id, github_file._id)

        res = self.app.patch_json_api(
            self.url + '{}/'.format(preprint._id),
            update_payload,
            auth=self.user.auth,
            expect_errors=True
        )

        assert_equal(res.status_code, 400)
        assert_equal(
            res.json['errors'][0]['detail'],
            'This file is not a valid primary file for this preprint.')
Code Example #8
    def test_publish_preprint_fails_with_invalid_primary_file(self):
        no_file_payload = build_preprint_create_payload(
            node_id=self.public_project._id,
            provider_id=self.provider._id,
            attrs={
                'subjects': [[SubjectFactory()._id]],
            }
        )
        res = self.app.post_json_api(
            self.url,
            no_file_payload,
            auth=self.user.auth,
            expect_errors=True)

        assert_equal(res.status_code, 201)
        preprint = Preprint.load(res.json['data']['id'])
        update_payload = build_preprint_update_payload(preprint._id, 'fakefileid')

        res = self.app.patch_json_api(
            self.url + '{}/'.format(preprint._id),
            update_payload,
            auth=self.user.auth,
            expect_errors=True
        )

        assert_equal(res.status_code, 400)
        assert_equal(
            res.json['errors'][0]['detail'],
            'A valid primary_file must be set before publishing a preprint.')
Code Example #9
    def test_file_not_osfstorage(self):
        public_project_payload = build_preprint_create_payload(
            provider_id=self.provider._id)

        res = self.app.post_json_api(
            self.url,
            public_project_payload,
            auth=self.user.auth,
            expect_errors=True)

        preprint = Preprint.load(res.json['data']['id'])
        assert_equal(res.status_code, 201)

        github_file = test_utils.create_test_preprint_file(
            preprint, self.user, 'coffee_manuscript.pdf')
        github_file.recast(GithubFile._typedmodels_type)
        github_file.save()

        update_payload = build_preprint_update_payload(preprint._id, github_file._id)

        res = self.app.patch_json_api(
            self.url + '{}/'.format(preprint._id),
            update_payload,
            auth=self.user.auth,
            expect_errors=True
        )

        assert_equal(res.status_code, 400)
        assert_equal(
            res.json['errors'][0]['detail'],
            'This file is not a valid primary file for this preprint.')
Code Example #10
File: permissions.py Project: mdicgovbr/osf.io
    def has_object_permission(self, request, view, obj):
        assert isinstance(obj, dict)
        auth = get_user_auth(request)
        collection = obj['self']
        has_collection_auth = auth.user and auth.user.has_perm(
            'write_collection', collection)

        if request.method in permissions.SAFE_METHODS:
            if collection.is_public:
                return True
        elif request.method == 'DELETE':
            return has_collection_auth

        if not has_collection_auth:
            return False
        pointer_objects = []
        for pointer in request.data.get('data', []):
            obj = AbstractNode.load(pointer['id']) or Preprint.load(
                pointer['id'])
            if not obj:
                raise NotFound(detail='Node with id "{}" was not found'.format(
                    pointer['id']))
            pointer_objects.append(obj)
        has_pointer_auth = True
        # TODO: is this necessary? get_object checks can_view
        for pointer in pointer_objects:
            if not pointer.can_view(auth):
                has_pointer_auth = False
                break
        return has_pointer_auth
Code Example #11
    def test_create_preprint_with_supplemental_private_project(self):
        private_project_payload = build_preprint_create_payload(
            self.private_project._id,
            self.provider._id,
            attrs={
                'subjects': [
                    [
                        SubjectFactory()._id]],
            })
        res = self.app.post_json_api(
            self.url,
            private_project_payload,
            auth=self.user.auth)

        assert_equal(res.status_code, 201)
        self.private_project.reload()
        assert_false(self.private_project.is_public)

        preprint = Preprint.load(res.json['data']['id'])
        res = self.publish_preprint(preprint, self.user)
        preprint.reload()
        assert_equal(res.status_code, 200)
        self.private_project.reload()
        assert_false(self.private_project.is_public)
        assert_true(preprint.is_public)
        assert_true(preprint.is_published)
Code Example #12
def async_update_resource_share(self, guid, old_subjects=None):
    """
    This function updates SHARE; it takes Preprints, Projects, and Registrations.
    :param self: the bound task instance (used for retries)
    :param guid: guid of the resource to update
    :return: the response from SHARE
    """
    AbstractNode = apps.get_model('osf.AbstractNode')
    resource = AbstractNode.load(guid)
    if not resource:
        Preprint = apps.get_model('osf.Preprint')
        resource = Preprint.load(guid)

    data = serialize_share_data(resource, old_subjects)
    resp = send_share_json(resource, data)
    try:
        resp.raise_for_status()
    except Exception as e:
        if self.request.retries == self.max_retries:
            log_exception()
        elif resp.status_code >= 500:
            try:
                self.retry(
                    exc=e,
                    countdown=(random.random() + 1) * min(
                        60 + settings.CELERY_RETRY_BACKOFF_BASE**
                        self.request.retries, 60 * 10),
                )
            except Retry:  # Retry is only raised after > 5 retries
                log_exception()
        else:
            log_exception()

    return resp
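The self.retry / self.request.retries / self.max_retries calls indicate a bound Celery task, so callers would normally queue it rather than invoke it directly. A sketch following the enqueue pattern shown in Code Example #40 (the guid is a placeholder; enqueue_task and .s() availability are assumptions):

    enqueue_task(async_update_resource_share.s('abc12', old_subjects=None))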
Code Example #13
File: elastic_search.py Project: aaxelb/osf.io
def update_preprint_async(self, preprint_id, index=None, bulk=False):
    Preprint = apps.get_model('osf.Preprint')
    preprint = Preprint.load(preprint_id)
    try:
        update_preprint(preprint=preprint, index=index, bulk=bulk, async_update=True)
    except Exception as exc:
        self.retry(exc=exc)
Code Example #14
def _kwargs_to_nodes(kwargs):
    """Retrieve project and component objects from keyword arguments.

    :param dict kwargs: Dictionary of keyword arguments
    :return: Tuple of parent and node

    """
    node = kwargs.get('node') or kwargs.get('project')
    parent = kwargs.get('parent')
    if node:
        return parent, node

    pid = kwargs.get('pid')
    nid = kwargs.get('nid')
    if pid and nid:
        node = _load_node_or_fail(nid)
        parent = _load_node_or_fail(pid)
    elif pid and not nid:
        node = Preprint.load(pid)
        if not node:
            node = _load_node_or_fail(pid)
    elif nid and not pid:
        node = _load_node_or_fail(nid)
    elif not pid and not nid:
        raise HTTPError(http_status.HTTP_404_NOT_FOUND,
                        data={
                            'message_short':
                            'Node not found',
                            'message_long':
                            'No Node with that primary key could be found',
                        })
    return parent, node
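_kwargs_to_nodes accepts either already-loaded objects ('node'/'project', plus optional 'parent') or guid strings ('pid'/'nid'), and a lone 'pid' is first tried as a Preprint. Illustrative call shapes (guids are placeholders):

    parent, node = _kwargs_to_nodes({'node': component, 'parent': project})
    parent, node = _kwargs_to_nodes({'pid': 'abc12', 'nid': 'def34'})  # parent from 'pid', node from 'nid'
    parent, node = _kwargs_to_nodes({'pid': 'abc12'})  # may resolve to a Preprint or an AbstractNode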
Code Example #15
    def has_object_permission(self, request, view, obj):
        assert isinstance(obj, dict)
        auth = get_user_auth(request)
        collection = obj['self']
        has_collection_auth = auth.user and auth.user.has_perm('write_collection', collection)

        if request.method in permissions.SAFE_METHODS:
            if collection.is_public:
                return True
        elif request.method == 'DELETE':
            return has_collection_auth

        if not has_collection_auth:
            return False
        pointer_objects = []
        for pointer in request.data.get('data', []):
            obj = AbstractNode.load(pointer['id']) or Preprint.load(pointer['id'])
            if not obj:
                raise NotFound(detail='Node with id "{}" was not found'.format(pointer['id']))
            pointer_objects.append(obj)
        has_pointer_auth = True
        # TODO: is this necessary? get_object checks can_view
        for pointer in pointer_objects:
            if not pointer.can_view(auth):
                has_pointer_auth = False
                break
        return has_pointer_auth
Code Example #16
    def test_publish_preprint_fails_with_invalid_primary_file(self):
        no_file_payload = build_preprint_create_payload(
            node_id=self.public_project._id,
            provider_id=self.provider._id,
            attrs={
                'subjects': [[SubjectFactory()._id]],
            }
        )
        res = self.app.post_json_api(
            self.url,
            no_file_payload,
            auth=self.user.auth,
            expect_errors=True)

        assert_equal(res.status_code, 201)
        preprint = Preprint.load(res.json['data']['id'])
        update_payload = build_preprint_update_payload(preprint._id, 'fakefileid')

        res = self.app.patch_json_api(
            self.url + '{}/'.format(preprint._id),
            update_payload,
            auth=self.user.auth,
            expect_errors=True
        )

        assert_equal(res.status_code, 400)
        assert_equal(
            res.json['errors'][0]['detail'],
            'A valid primary_file must be set before publishing a preprint.')
Code Example #17
File: views.py Project: CenterForOpenScience/osf.io
    def perform_create(self, serializer):
        user = self.request.user
        preprint = Preprint.load(self.kwargs['preprint_id'])
        if not preprint:
            raise NotFound
        self.check_object_permissions(self.request, preprint)
        serializer.save(submitter=user, preprint=preprint)
Code Example #18
File: elastic_search.py Project: mdicgovbr/osf.io
def update_preprint_async(self, preprint_id, index=None, bulk=False):
    Preprint = apps.get_model('osf.Preprint')
    preprint = Preprint.load(preprint_id)
    try:
        update_preprint(preprint=preprint, index=index, bulk=bulk, async_update=True)
    except Exception as exc:
        self.retry(exc=exc)
Code Example #19
        def wrapped(*args, **kwargs):
            if preprints_valid and Preprint.load(kwargs.get('pid')):
                _inject_nodes(kwargs)

                return func(*args, **kwargs)

            if groups_valid and OSFGroup.load(kwargs.get('pid')):
                kwargs['node'] = OSFGroup.load(kwargs.get('pid'))
                return func(*args, **kwargs)

            _inject_nodes(kwargs)

            if getattr(kwargs['node'], 'is_collection', True) or (
                    getattr(kwargs['node'], 'is_quickfiles', True) and not quickfiles_valid):
                raise HTTPError(http_status.HTTP_404_NOT_FOUND)

            if not retractions_valid and getattr(kwargs['node'].retraction, 'is_retracted', False):
                raise HTTPError(
                    http_status.HTTP_400_BAD_REQUEST,
                    data=dict(message_long='Viewing withdrawn registrations is not permitted'))
            else:
                return func(*args, **kwargs)
Code Example #20
    def get_preprint_provider(self, obj):
        preprint_id = obj.get('preprint', None)
        if preprint_id:
            preprint = Preprint.load(preprint_id)
            if preprint:
                provider = preprint.provider
                return {'url': provider.external_url, 'name': provider.name}
        return None
Code Example #21
    def get_preprint_provider(self, obj):
        preprint_id = obj.get('preprint', None)
        if preprint_id:
            preprint = Preprint.load(preprint_id)
            if preprint:
                provider = preprint.provider
                return {'url': provider.external_url, 'name': provider.name}
        return None
Code Example #22
File: views.py Project: xlecours/osf.io
    def post(self, request):
        crossref_email_content = lxml.etree.fromstring(str(request.POST['body-plain']))
        status = crossref_email_content.get('status').lower()  # from element doi_batch_diagnostic
        record_count = int(crossref_email_content.find('batch_data/record_count').text)
        records = crossref_email_content.xpath('//record_diagnostic')
        dois_processed = 0
        # Keep track of errors received; ignore those that are handled
        unexpected_errors = False

        if status == 'completed':
            guids = []
            for record in records:
                doi = getattr(record.find('doi'), 'text', None)
                guid = doi.split('/')[-1] if doi else None
                guids.append(guid)
                preprint = Preprint.load(guid) if guid else None
                if record.get('status').lower() == 'success' and doi:
                    msg = record.find('msg').text
                    created = bool(msg == 'Successfully added')
                    legacy_doi = preprint.get_identifier(category='legacy_doi')
                    if created or legacy_doi:
                        # Sets preprint_doi_created and saves the preprint
                        preprint.set_identifier_values(doi=doi, save=True)
                    # Double records returned when possible matching DOI is found in crossref
                    elif 'possible preprint/vor pair' not in msg.lower():
                        # Directly updates the identifier
                        preprint.set_identifier_value(category='doi', value=doi)

                    dois_processed += 1

                    # Mark legacy DOIs overwritten by newly batch confirmed crossref DOIs
                    if legacy_doi:
                        legacy_doi.remove()

                elif record.get('status').lower() == 'failure':
                    if 'Relation target DOI does not exist' in record.find('msg').text:
                        logger.warn('Related publication DOI does not exist, sending metadata again without it...')
                        client = preprint.get_doi_client()
                        client.create_identifier(preprint, category='doi', include_relation=False)
                    # This error occurs when a single preprint is being updated several times in a row with the same metadata [#PLAT-944]
                    elif 'less or equal to previously submitted version' in record.find('msg').text and record_count == 2:
                        break
                    else:
                        unexpected_errors = True
            logger.info('Creation success email received from CrossRef for preprints: {}'.format(guids))

        if dois_processed != record_count or status != 'completed':
            if unexpected_errors:
                batch_id = crossref_email_content.find('batch_id').text
                mails.send_mail(
                    to_addr=settings.OSF_SUPPORT_EMAIL,
                    mail=mails.CROSSREF_ERROR,
                    batch_id=batch_id,
                    email_content=request.POST['body-plain'],
                )
                logger.error('Error submitting metadata for batch_id {} with CrossRef, email sent to help desk'.format(batch_id))

        return HttpResponse('Mail received', status=200)
Code Example #23
    def get_node_title(self, obj):
        user = self.context['request'].user
        node_title = obj['node']['title']
        node = AbstractNode.load(obj['node']['_id']) or Preprint.load(obj['node']['_id'])
        if not user.is_authenticated:
            if node.is_public:
                return node_title
        elif node.has_permission(user, osf_permissions.READ):
            return node_title
        return 'Private Component'
Code Example #24
    def get_node_title(self, obj):
        user = self.context['request'].user
        node_title = obj['node']['title']
        node = AbstractNode.load(obj['node']['_id']) or Preprint.load(obj['node']['_id'])
        if not user.is_authenticated:
            if node.is_public:
                return node_title
        elif node.has_permission(user, osf_permissions.READ):
            return node_title
        return 'Private Component'
Code Example #25
    def __init__(self, user, node, event, payload=None):
        super(ComplexFileEvent, self).__init__(user,
                                               node,
                                               event,
                                               payload=payload)

        source_nid = self.payload['source']['node']['_id']
        self.source_node = AbstractNode.load(source_nid) or Preprint.load(
            source_nid)
        self.addon = self.node.get_addon(
            self.payload['destination']['provider'])
Code Example #26
    def get_provider_from_url(referer_url: str) -> Optional[PreprintProvider]:
        """
        Takes the many preprint referer URLs and tries to figure out the provider from them.
        This will be eliminated post-sloan.
        :param referer_url:
        :return: PreprintProvider
        """

        # matches custom domains:
        provider_domains = list(
            PreprintProvider.objects.exclude(domain='').filter(
                domain_redirect_enabled=True,  # must exclude our native domains like https://staging2.osf.io/
            ).values_list('domain', flat=True)
        )
        provider_domains = [
            domains for domains in provider_domains
            if referer_url.startswith(domains)
        ]

        if provider_domains:
            return PreprintProvider.objects.get(domain=provider_domains[0])

        provider_ids_regex = '|'.join([
            re.escape(id)
            for id in PreprintProvider.objects.all().values_list('_id', flat=True)
        ])
        # matches:
        # /ispp0  (preprint id)
        path = urlparse(referer_url).path.replace('/', '')
        preprint = Preprint.load(path)
        if preprint:
            return preprint.provider

        # matches:
        # /preprints
        # /preprints/
        # /preprints/notfound
        # /preprints/foorxiv
        # /preprints/foorxiv/
        # /preprints/foorxiv/guid0
        provider_regex = r'preprints($|\/$|\/(?P<provider_id>{})|)'.format(
            provider_ids_regex)
        match = re.match(re.escape(DOMAIN) + provider_regex, referer_url)
        if match:
            provider_id = match.groupdict().get('provider_id')
            if provider_id:
                return PreprintProvider.objects.get(_id=provider_id)
            return PreprintProvider.objects.get(_id='osf')
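Judging from the docstring and the inline 'matches:' comments, the resolver recognizes custom provider domains, bare preprint guids, and /preprints/<provider_id> paths under the OSF domain. Illustrative calls, assuming the method is exposed as a staticmethod and DOMAIN is the OSF root URL (provider id and guid are placeholders):

    provider = get_provider_from_url('https://osf.io/preprints/foorxiv')  # provider id in the path
    provider = get_provider_from_url('https://osf.io/ispp0')  # bare preprint guid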
Code Example #27
    def test_create_preprint_with_supplemental_public_project(self):
        public_project_payload = build_preprint_create_payload(
            self.public_project._id, self.provider._id)

        res = self.app.post_json_api(self.url,
                                     public_project_payload,
                                     auth=self.user.auth)

        data = res.json['data']
        preprint = Preprint.load(data['id'])
        assert_equal(res.status_code, 201)
        assert_equal(data['attributes']['is_published'], False)
        assert preprint.node == self.public_project
Code Example #28
    def test_create_preprint_from_project_published_hits_update(
            self, mock_on_preprint_updated):
        private_project_payload = build_preprint_create_payload(
            self.private_project._id, self.provider._id)
        res = self.app.post_json_api(self.url,
                                     private_project_payload,
                                     auth=self.user.auth)

        assert_false(mock_on_preprint_updated.called)
        preprint = Preprint.load(res.json['data']['id'])
        self.publish_preprint(preprint, self.user)

        assert_true(mock_on_preprint_updated.called)
Code Example #29
    def test_create_preprint_does_not_create_a_node(
            self, app, user_one, provider, url, preprint_payload):
        # Assume that if a supplemental node is being created, will be a separate POST to nodes?
        res = app.post_json_api(
            url,
            preprint_payload,
            auth=user_one.auth,
            expect_errors=True)

        assert res.status_code == 201
        preprint = Preprint.load(res.json['data']['id'])
        assert preprint.node is None
        assert not Node.objects.filter(
            preprints__guids___id=res.json['data']['id']).exists()
Code Example #30
    def test_create_preprint_adds_log_if_published(self):
        public_project_payload = build_preprint_create_payload(
            provider_id=self.provider._id, )
        res = self.app.post_json_api(self.url,
                                     public_project_payload,
                                     auth=self.user.auth)
        assert_equal(res.status_code, 201)

        preprint = Preprint.load(res.json['data']['id'])
        res = self.publish_preprint(preprint, self.user)

        log = preprint.logs.latest()
        assert_equal(log.action, 'published')
        assert_equal(log.params.get('preprint'), preprint._id)
Code Example #31
    def test_create_preprint_with_supplemental_public_project(self):
        public_project_payload = build_preprint_create_payload(
            self.public_project._id, self.provider._id)

        res = self.app.post_json_api(
            self.url,
            public_project_payload,
            auth=self.user.auth)

        data = res.json['data']
        preprint = Preprint.load(data['id'])
        assert_equal(res.status_code, 201)
        assert_equal(data['attributes']['is_published'], False)
        assert preprint.node == self.public_project
Code Example #32
    def test_create_preprint_does_not_create_a_node(
            self, app, user_one, provider, url, preprint_payload):
        # Assume that if a supplemental node is being created, will be a separate POST to nodes?
        res = app.post_json_api(
            url,
            preprint_payload,
            auth=user_one.auth,
            expect_errors=True)

        assert res.status_code == 201
        preprint = Preprint.load(res.json['data']['id'])
        assert preprint.node is None
        assert not Node.objects.filter(
            preprints__guids___id=res.json['data']['id']).exists()
Code Example #33
    def test_create_preprint_from_project_published_hits_update(
            self, mock_on_preprint_updated):
        private_project_payload = build_preprint_create_payload(
            self.private_project._id,
            self.provider._id)
        res = self.app.post_json_api(
            self.url,
            private_project_payload,
            auth=self.user.auth)

        assert_false(mock_on_preprint_updated.called)
        preprint = Preprint.load(res.json['data']['id'])
        self.publish_preprint(preprint, self.user)

        assert_true(mock_on_preprint_updated.called)
Code Example #34
    def test_create_preprint_adds_log_if_published(self):
        public_project_payload = build_preprint_create_payload(
            provider_id=self.provider._id,
        )
        res = self.app.post_json_api(
            self.url,
            public_project_payload,
            auth=self.user.auth)
        assert_equal(res.status_code, 201)

        preprint = Preprint.load(res.json['data']['id'])
        res = self.publish_preprint(preprint, self.user)

        log = preprint.logs.latest()
        assert_equal(log.action, 'published')
        assert_equal(log.params.get('preprint'), preprint._id)
Code Example #35
def verify_merge(merged_list):
    """
    Expecting merged list in format [{"user_id": "abcde", "preprints": ["12345"]}]
    """
    for user_dict in merged_list:
        user = OSFUser.load(user_dict['user_id'])
        merged_by = user.merged_by
        for preprint_id in user_dict['preprints']:
            preprint = Preprint.load(preprint_id)
            user_contrib = _get_preprint_contributor(preprint, user)
            merged_by_contrib = _get_preprint_contributor(preprint, merged_by)
            print('Preprint: {}'.format(preprint._id))
            print('    User: {}, Merged by: {}'.format(user._id, merged_by._id))
            print('   Perms: {}, {}'.format(user_contrib.permission if user_contrib else None, merged_by_contrib.permission if merged_by_contrib else None))
            print('     Bib: {}, {}'.format(user_contrib.visible if user_contrib else None, merged_by_contrib.visible if merged_by_contrib else None))
            print(' Creator: {}, {}'.format(preprint.creator == user, preprint.creator == merged_by))
            print('_______________________________________')
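The docstring spells out the expected input shape, so usage is direct (the ids below are placeholders):

    verify_merge([{'user_id': 'abcde', 'preprints': ['12345']}])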
Code Example #36
    def test_create_preprint_with_supplementary_node(self, app, user_one,
                                                     provider, url,
                                                     preprint_payload,
                                                     supplementary_project):
        preprint_payload['data']['relationships']['node'] = {
            'data': {
                'id': supplementary_project._id,
                'type': 'nodes'
            }
        }
        res = app.post_json_api(url, preprint_payload, auth=user_one.auth)

        assert res.status_code == 201
        preprint = Preprint.load(res.json['data']['id'])
        assert preprint.node == supplementary_project
        assert Node.objects.filter(
            preprints__guids___id=res.json['data']['id']).exists()
Code Example #37
    def test_setting_is_published_with_moderated_provider_fails(
            self, mock_on_preprint_updated):
        self.provider.reviews_workflow = 'pre-moderation'
        self.provider.save()
        public_project_payload = build_preprint_create_payload(
            self.public_project._id,
            self.provider._id,
        )
        res = self.app.post_json_api(self.url,
                                     public_project_payload,
                                     auth=self.user.auth,
                                     expect_errors=True)
        assert res.status_code == 201
        preprint = Preprint.load(res.json['data']['id'])
        res = self.publish_preprint(preprint, self.user, expect_errors=True)
        assert res.status_code == 409
        assert not mock_on_preprint_updated.called
Code Example #38
    def test_create_preprint_with_supplementary_node(
            self, app, user_one, provider, url, preprint_payload, supplementary_project):
        preprint_payload['data']['relationships']['node'] = {
            'data': {
                'id': supplementary_project._id,
                'type': 'nodes'
            }
        }
        res = app.post_json_api(
            url,
            preprint_payload,
            auth=user_one.auth)

        assert res.status_code == 201
        preprint = Preprint.load(res.json['data']['id'])
        assert preprint.node == supplementary_project
        assert Node.objects.filter(
            preprints__guids___id=res.json['data']['id']).exists()
Code Example #39
    def test_setting_is_published_with_moderated_provider_fails(
            self, mock_on_preprint_updated):
        self.provider.reviews_workflow = 'pre-moderation'
        self.provider.save()
        public_project_payload = build_preprint_create_payload(
            self.public_project._id,
            self.provider._id,
        )
        res = self.app.post_json_api(
            self.url,
            public_project_payload,
            auth=self.user.auth,
            expect_errors=True)
        assert res.status_code == 201
        preprint = Preprint.load(res.json['data']['id'])
        res = self.publish_preprint(preprint, self.user, expect_errors=True)
        assert res.status_code == 409
        assert not mock_on_preprint_updated.called
Code Example #40
File: views.py Project: jwalz/osf.io
    def get_default_queryset(self):
        user = get_user_auth(self.request).user
        preprint_contributors = Preprint.load(self.kwargs['preprint_id'])._contributors
        queryset = ChronosSubmission.objects.filter(preprint__guids___id=self.kwargs['preprint_id'])

        # Get the list of stale submissions and queue a task to update them
        update_list_id = queryset.filter(
            modified__lt=chronos_submission_stale_time(),
        ).values_list('id', flat=True)
        if len(update_list_id) > 0:
            enqueue_task(update_submissions_status_async.s(list(update_list_id)))

        # If the user is a contributor on this preprint, show all submissions
        # Otherwise, only show submissions in status 3 or 4 (accepted or published)
        if user and preprint_contributors.filter(id=user.id).exists():
            return queryset
        else:
            return queryset.filter(status__in=[3, 4])
Code Example #41
File: views.py Project: CenterForOpenScience/osf.io
    def get_default_queryset(self):
        user = get_user_auth(self.request).user
        preprint_contributors = Preprint.load(self.kwargs['preprint_id'])._contributors
        queryset = ChronosSubmission.objects.filter(preprint__guids___id=self.kwargs['preprint_id'])

        # Get the list of stale submissions and queue a task to update them
        update_list_id = queryset.filter(
            modified__lt=chronos_submission_stale_time(),
        ).values_list('id', flat=True)
        if len(update_list_id) > 0:
            enqueue_task(update_submissions_status_async.s(list(update_list_id)))

        # If the user is a contributor on this preprint, show all submissions
        # Otherwise, only show submissions in status 3 or 4 (accepted or published)
        if user and preprint_contributors.filter(id=user.id).exists():
            return queryset
        else:
            return queryset.filter(status__in=[3, 4])
Code Example #42
def _async_update_preprint_share(self, preprint_id, old_subjects, share_type):
    # Any modifications to this function may need to change _update_preprint_share
    # Takes preprint_id to ensure async retries push fresh data
    Preprint = apps.get_model('osf.Preprint')
    preprint = Preprint.load(preprint_id)

    data = serialize_share_preprint_data(preprint, share_type, old_subjects)
    resp = send_share_preprint_data(preprint, data)
    try:
        resp.raise_for_status()
    except Exception as e:
        if resp.status_code >= 500:
            if self.request.retries == self.max_retries:
                send_desk_share_preprint_error(preprint, resp,
                                               self.request.retries)
            raise self.retry(exc=e,
                             countdown=(random.random() + 1) * min(
                                 60 + settings.CELERY_RETRY_BACKOFF_BASE**
                                 self.request.retries, 60 * 10))
        else:
            send_desk_share_preprint_error(preprint, resp,
                                           self.request.retries)
Code Example #43
def on_preprint_updated(preprint_id,
                        update_share=True,
                        share_type=None,
                        old_subjects=None,
                        saved_fields=None):
    # WARNING: Only perform Read-Only operations in an asynchronous task, until Repeatable Read/Serializable
    # transactions are implemented in View and Task application layers.
    from osf.models import Preprint
    preprint = Preprint.load(preprint_id)
    if old_subjects is None:
        old_subjects = []
    need_update = bool(
        preprint.SEARCH_UPDATE_FIELDS.intersection(saved_fields or {}))

    if need_update:
        preprint.update_search()

    if should_update_preprint_identifiers(preprint, old_subjects,
                                          saved_fields):
        update_or_create_preprint_identifiers(preprint)

    if update_share:
        update_preprint_share(preprint, old_subjects, share_type)
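on_preprint_updated only re-indexes the preprint when one of the saved fields is in Preprint.SEARCH_UPDATE_FIELDS. A hedged call sketch (the guid is a placeholder and 'title' is assumed to be a search-update field):

    on_preprint_updated('abc12', saved_fields={'title'}, update_share=False)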
Code Example #44
    def has_permission(self, request, view):
        preprint = Preprint.load(view.kwargs.get('preprint_id', None))
        if not preprint:
            raise exceptions.NotFound
        return PreprintPublishedOrAdmin().has_object_permission(request, view, preprint)
Code Example #45
File: views.py Project: Priyanshu72/osf.io
def create_waterbutler_log(payload, **kwargs):
    with transaction.atomic():
        try:
            auth = payload['auth']
            # Don't log download actions
            if payload['action'] in DOWNLOAD_ACTIONS:
                guid = Guid.load(payload['metadata'].get('nid'))
                if guid:
                    node = guid.referent
                return {'status': 'success'}

            user = OSFUser.load(auth['id'])
            if user is None:
                raise HTTPError(http_status.HTTP_400_BAD_REQUEST)

            action = LOG_ACTION_MAP[payload['action']]
        except KeyError:
            raise HTTPError(http_status.HTTP_400_BAD_REQUEST)

        auth = Auth(user=user)
        node = kwargs.get('node') or kwargs.get('project') or Preprint.load(
            kwargs.get('nid')) or Preprint.load(kwargs.get('pid'))

        if action in (NodeLog.FILE_MOVED, NodeLog.FILE_COPIED):

            for bundle in ('source', 'destination'):
                for key in ('provider', 'materialized', 'name', 'nid'):
                    if key not in payload[bundle]:
                        raise HTTPError(http_status.HTTP_400_BAD_REQUEST)

            dest = payload['destination']
            src = payload['source']

            if src is not None and dest is not None:
                dest_path = dest['materialized']
                src_path = src['materialized']
                if dest_path.endswith('/') and src_path.endswith('/'):
                    dest_path = os.path.dirname(dest_path)
                    src_path = os.path.dirname(src_path)
                if (os.path.split(dest_path)[0] == os.path.split(src_path)[0]
                        and dest['provider'] == src['provider']
                        and dest['nid'] == src['nid']
                        and dest['name'] != src['name']):
                    action = LOG_ACTION_MAP['rename']

            destination_node = node  # For clarity
            source_node = AbstractNode.load(src['nid']) or Preprint.load(
                src['nid'])

            # We return provider fullname so we need to load node settings, if applicable
            source = None
            if hasattr(source_node, 'get_addon'):
                source = source_node.get_addon(payload['source']['provider'])
            destination = None
            if hasattr(node, 'get_addon'):
                destination = node.get_addon(
                    payload['destination']['provider'])

            payload['source'].update({
                'materialized':
                payload['source']['materialized'].lstrip('/'),
                'addon':
                source.config.full_name if source else 'osfstorage',
                'url':
                source_node.web_url_for(
                    'addon_view_or_download_file',
                    path=payload['source']['path'].lstrip('/'),
                    provider=payload['source']['provider']),
                'node': {
                    '_id': source_node._id,
                    'url': source_node.url,
                    'title': source_node.title,
                }
            })

            payload['destination'].update({
                'materialized':
                payload['destination']['materialized'].lstrip('/'),
                'addon':
                destination.config.full_name if destination else 'osfstorage',
                'url':
                destination_node.web_url_for(
                    'addon_view_or_download_file',
                    path=payload['destination']['path'].lstrip('/'),
                    provider=payload['destination']['provider']),
                'node': {
                    '_id': destination_node._id,
                    'url': destination_node.url,
                    'title': destination_node.title,
                }
            })

            if not payload.get('errors'):
                destination_node.add_log(action=action,
                                         auth=auth,
                                         params=payload)

            if payload.get('email') is True or payload.get('errors'):
                mails.send_mail(
                    user.username,
                    mails.FILE_OPERATION_FAILED
                    if payload.get('errors') else mails.FILE_OPERATION_SUCCESS,
                    action=payload['action'],
                    source_node=source_node,
                    destination_node=destination_node,
                    source_path=payload['source']['materialized'],
                    source_addon=payload['source']['addon'],
                    destination_addon=payload['destination']['addon'],
                    osf_support_email=settings.OSF_SUPPORT_EMAIL)

            if payload.get('errors'):
                # Action failed but our function succeeded
                # Bail out to avoid file_signals
                return {'status': 'success'}

        else:
            node.create_waterbutler_log(auth, action, payload)

    metadata = payload.get('metadata') or payload.get('destination')

    target_node = AbstractNode.load(metadata.get('nid'))
    if target_node and not target_node.is_quickfiles and payload[
            'action'] != 'download_file':
        update_storage_usage_with_size(payload)

    with transaction.atomic():
        file_signals.file_updated.send(target=node,
                                       user=user,
                                       event_type=action,
                                       payload=payload)

    return {'status': 'success'}
Code Example #46
File: views.py Project: Priyanshu72/osf.io
def get_auth(auth, **kwargs):
    cas_resp = None
    if not auth.user:
        # Central Authentication Server OAuth Bearer Token
        authorization = request.headers.get('Authorization')
        if authorization and authorization.startswith('Bearer '):
            client = cas.get_client()
            try:
                access_token = cas.parse_auth_header(authorization)
                cas_resp = client.profile(access_token)
            except cas.CasError as err:
                sentry.log_exception()
                # NOTE: We assume that the request is an AJAX request
                return json_renderer(err)
            if cas_resp.authenticated:
                auth.user = OSFUser.load(cas_resp.user)

    try:
        data = jwt.decode(jwe.decrypt(
            request.args.get('payload', '').encode('utf-8'),
            WATERBUTLER_JWE_KEY),
                          settings.WATERBUTLER_JWT_SECRET,
                          options={'require_exp': True},
                          algorithm=settings.WATERBUTLER_JWT_ALGORITHM)['data']
    except (jwt.InvalidTokenError, KeyError) as err:
        sentry.log_message(str(err))
        raise HTTPError(http_status.HTTP_403_FORBIDDEN)

    if not auth.user:
        auth.user = OSFUser.from_cookie(data.get('cookie', ''))

    try:
        action = data['action']
        node_id = data['nid']
        provider_name = data['provider']
    except KeyError:
        raise HTTPError(http_status.HTTP_400_BAD_REQUEST)

    node = AbstractNode.load(node_id) or Preprint.load(node_id)
    if node and node.is_deleted:
        raise HTTPError(http_status.HTTP_410_GONE)
    elif not node:
        raise HTTPError(http_status.HTTP_404_NOT_FOUND)

    check_access(node, auth, action, cas_resp)
    provider_settings = None
    if hasattr(node, 'get_addon'):
        provider_settings = node.get_addon(provider_name)
        if not provider_settings:
            raise HTTPError(http_status.HTTP_400_BAD_REQUEST)

    path = data.get('path')
    credentials = None
    waterbutler_settings = None
    fileversion = None
    if provider_name == 'osfstorage':
        if path:
            file_id = path.strip('/')
            # check to see if this is a file or a folder
            filenode = OsfStorageFileNode.load(path.strip('/'))
            if filenode and filenode.is_file:
                # default to the most recent version if none is provided in the payload
                version = int(data['version']) if data.get(
                    'version') else filenode.versions.count()
                try:
                    fileversion = FileVersion.objects.filter(
                        basefilenode___id=file_id,
                        identifier=version).select_related('region').get()
                except FileVersion.DoesNotExist:
                    raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
                if auth.user:
                    # mark fileversion as seen
                    FileVersionUserMetadata.objects.get_or_create(
                        user=auth.user, file_version=fileversion)
                if not node.is_contributor_or_group_member(auth.user):
                    from_mfr = download_is_from_mfr(request, payload=data)
                    # version index is 0 based
                    version_index = version - 1
                    if action == 'render':
                        update_analytics(node, filenode, version_index, 'view')
                    elif action == 'download' and not from_mfr:
                        update_analytics(node, filenode, version_index,
                                         'download')
                    if waffle.switch_is_active(features.ELASTICSEARCH_METRICS):
                        if isinstance(node, Preprint):
                            metric_class = get_metric_class_for_action(
                                action, from_mfr=from_mfr)
                            if metric_class:
                                sloan_flags = {
                                    'sloan_id':
                                    request.cookies.get(SLOAN_ID_COOKIE_NAME)
                                }
                                for flag_name in SLOAN_FLAGS:
                                    value = request.cookies.get(
                                        f'dwf_{flag_name}_custom_domain'
                                    ) or request.cookies.get(
                                        f'dwf_{flag_name}')
                                    if value:
                                        sloan_flags[flag_name.replace(
                                            '_display', '')] = strtobool(value)

                                try:
                                    metric_class.record_for_preprint(
                                        preprint=node,
                                        user=auth.user,
                                        version=fileversion.identifier
                                        if fileversion else None,
                                        path=path,
                                        **sloan_flags)
                                except es_exceptions.ConnectionError:
                                    log_exception()
        if fileversion and provider_settings:
            region = fileversion.region
            credentials = region.waterbutler_credentials
            waterbutler_settings = fileversion.serialize_waterbutler_settings(
                node_id=provider_settings.owner._id,
                root_id=provider_settings.root_node._id,
            )
    # If they haven't been set by version region, use the NodeSettings or Preprint directly
    if not (credentials and waterbutler_settings):
        credentials = node.serialize_waterbutler_credentials(provider_name)
        waterbutler_settings = node.serialize_waterbutler_settings(
            provider_name)

    if isinstance(credentials.get('token'), bytes):
        credentials['token'] = credentials.get('token').decode()

    return {
        'payload':
        jwe.encrypt(
            jwt.encode(
                {
                    'exp':
                    timezone.now() + datetime.timedelta(
                        seconds=settings.WATERBUTLER_JWT_EXPIRATION),
                    'data': {
                        'auth':
                        make_auth(
                            auth.user
                        ),  # A waterbutler auth dict not an Auth object
                        'credentials':
                        credentials,
                        'settings':
                        waterbutler_settings,
                        'callback_url':
                        node.api_url_for(
                            ('create_waterbutler_log'
                             if not getattr(node, 'is_registration', False)
                             else 'registration_callbacks'),
                            _absolute=True,
                            _internal=True)
                    }
                },
                settings.WATERBUTLER_JWT_SECRET,
                algorithm=settings.WATERBUTLER_JWT_ALGORITHM),
            WATERBUTLER_JWE_KEY).decode()
    }
Code Example #47
File: files.py Project: CenterForOpenScience/osf.io
    def __init__(self, user, node, event, payload=None):
        super(ComplexFileEvent, self).__init__(user, node, event, payload=payload)

        source_nid = self.payload['source']['node']['_id']
        self.source_node = AbstractNode.load(source_nid) or Preprint.load(source_nid)
        self.addon = self.node.get_addon(self.payload['destination']['provider'])
Code Example #48
File: views.py Project: aaxelb/osf.io
def create_waterbutler_log(payload, **kwargs):
    with transaction.atomic():
        try:
            auth = payload['auth']
            # Don't log download actions, but do update analytics
            if payload['action'] in DOWNLOAD_ACTIONS:
                guid = Guid.load(payload['metadata'].get('nid'))
                if guid:
                    node = guid.referent
                return {'status': 'success'}

            user = OSFUser.load(auth['id'])
            if user is None:
                raise HTTPError(httplib.BAD_REQUEST)

            action = LOG_ACTION_MAP[payload['action']]
        except KeyError:
            raise HTTPError(httplib.BAD_REQUEST)

        auth = Auth(user=user)
        node = kwargs.get('node') or kwargs.get('project') or Preprint.load(kwargs.get('nid')) or Preprint.load(kwargs.get('pid'))

        if action in (NodeLog.FILE_MOVED, NodeLog.FILE_COPIED):

            for bundle in ('source', 'destination'):
                for key in ('provider', 'materialized', 'name', 'nid'):
                    if key not in payload[bundle]:
                        raise HTTPError(httplib.BAD_REQUEST)

            dest = payload['destination']
            src = payload['source']

            if src is not None and dest is not None:
                dest_path = dest['materialized']
                src_path = src['materialized']
                if dest_path.endswith('/') and src_path.endswith('/'):
                    dest_path = os.path.dirname(dest_path)
                    src_path = os.path.dirname(src_path)
                if (
                    os.path.split(dest_path)[0] == os.path.split(src_path)[0] and
                    dest['provider'] == src['provider'] and
                    dest['nid'] == src['nid'] and
                    dest['name'] != src['name']
                ):
                    action = LOG_ACTION_MAP['rename']

            destination_node = node  # For clarity
            source_node = AbstractNode.load(src['nid']) or Preprint.load(src['nid'])

            # We return provider fullname so we need to load node settings, if applicable
            source = None
            if hasattr(source_node, 'get_addon'):
                source = source_node.get_addon(payload['source']['provider'])
            destination = None
            if hasattr(node, 'get_addon'):
                destination = node.get_addon(payload['destination']['provider'])

            payload['source'].update({
                'materialized': payload['source']['materialized'].lstrip('/'),
                'addon': source.config.full_name if source else 'osfstorage',
                'url': source_node.web_url_for(
                    'addon_view_or_download_file',
                    path=payload['source']['path'].lstrip('/'),
                    provider=payload['source']['provider']
                ),
                'node': {
                    '_id': source_node._id,
                    'url': source_node.url,
                    'title': source_node.title,
                }
            })

            payload['destination'].update({
                'materialized': payload['destination']['materialized'].lstrip('/'),
                'addon': destination.config.full_name if destination else 'osfstorage',
                'url': destination_node.web_url_for(
                    'addon_view_or_download_file',
                    path=payload['destination']['path'].lstrip('/'),
                    provider=payload['destination']['provider']
                ),
                'node': {
                    '_id': destination_node._id,
                    'url': destination_node.url,
                    'title': destination_node.title,
                }
            })

            if not payload.get('errors'):
                destination_node.add_log(
                    action=action,
                    auth=auth,
                    params=payload
                )

            if payload.get('email') is True or payload.get('errors'):
                mails.send_mail(
                    user.username,
                    mails.FILE_OPERATION_FAILED if payload.get('errors')
                    else mails.FILE_OPERATION_SUCCESS,
                    action=payload['action'],
                    source_node=source_node,
                    destination_node=destination_node,
                    source_path=payload['source']['materialized'],
                    source_addon=payload['source']['addon'],
                    destination_addon=payload['destination']['addon'],
                    osf_support_email=settings.OSF_SUPPORT_EMAIL
                )

            if payload.get('errors'):
                # Action failed but our function succeeded
                # Bail out to avoid file_signals
                return {'status': 'success'}

        else:
            node.create_waterbutler_log(auth, action, payload)

    with transaction.atomic():
        file_signals.file_updated.send(target=node, user=user, event_type=action, payload=payload)

    return {'status': 'success'}
Code Example #49
def get_auth(auth, **kwargs):
    cas_resp = None
    if not auth.user:
        # Central Authentication Server OAuth Bearer Token
        authorization = request.headers.get('Authorization')
        if authorization and authorization.startswith('Bearer '):
            client = cas.get_client()
            try:
                access_token = cas.parse_auth_header(authorization)
                cas_resp = client.profile(access_token)
            except cas.CasError as err:
                sentry.log_exception()
                # NOTE: We assume that the request is an AJAX request
                return json_renderer(err)
            if cas_resp.authenticated:
                auth.user = OSFUser.load(cas_resp.user)

    try:
        data = jwt.decode(
            jwe.decrypt(request.args.get('payload', '').encode('utf-8'), WATERBUTLER_JWE_KEY),
            settings.WATERBUTLER_JWT_SECRET,
            options={'require_exp': True},
            algorithm=settings.WATERBUTLER_JWT_ALGORITHM
        )['data']
    except (jwt.InvalidTokenError, KeyError) as err:
        sentry.log_message(str(err))
        raise HTTPError(httplib.FORBIDDEN)

    if not auth.user:
        auth.user = OSFUser.from_cookie(data.get('cookie', ''))

    try:
        action = data['action']
        node_id = data['nid']
        provider_name = data['provider']
    except KeyError:
        raise HTTPError(httplib.BAD_REQUEST)

    node = AbstractNode.load(node_id) or Preprint.load(node_id)
    if not node:
        raise HTTPError(httplib.NOT_FOUND)

    check_access(node, auth, action, cas_resp)
    provider_settings = None
    if hasattr(node, 'get_addon'):
        provider_settings = node.get_addon(provider_name)
        if not provider_settings:
            raise HTTPError(httplib.BAD_REQUEST)

    try:
        path = data.get('path')
        version = data.get('version')
        credentials = None
        waterbutler_settings = None
        fileversion = None
        if provider_name == 'osfstorage':
            if path and version:
                # check to see if this is a file or a folder
                filenode = OsfStorageFileNode.load(path.strip('/'))
                if filenode and filenode.is_file:
                    try:
                        fileversion = FileVersion.objects.filter(
                            basefilenode___id=path.strip('/'),
                            identifier=version
                        ).select_related('region').get()
                    except FileVersion.DoesNotExist:
                        raise HTTPError(httplib.BAD_REQUEST)
            # path and no version, use most recent version
            elif path:
                filenode = OsfStorageFileNode.load(path.strip('/'))
                if filenode and filenode.is_file:
                    fileversion = FileVersion.objects.filter(
                        basefilenode=filenode
                    ).select_related('region').order_by('-created').first()
            if fileversion:
                region = fileversion.region
                credentials = region.waterbutler_credentials
                waterbutler_settings = fileversion.serialize_waterbutler_settings(
                    node_id=provider_settings.owner._id if provider_settings else node._id,
                    root_id=provider_settings.root_node._id if provider_settings else node.root_folder._id,
                )
        # If they haven't been set by version region, use the NodeSettings region
        if not (credentials and waterbutler_settings):
            credentials = node.serialize_waterbutler_credentials(provider_name)
            waterbutler_settings = node.serialize_waterbutler_settings(provider_name)
    except exceptions.AddonError:
        log_exception()
        raise HTTPError(httplib.BAD_REQUEST)

    # TODO: Add a signal here?
    if waffle.switch_is_active(features.ELASTICSEARCH_METRICS):
        user = auth.user
        if isinstance(node, Preprint) and not node.is_contributor(user):
            metric_class = get_metric_class_for_action(action)
            if metric_class:
                try:
                    metric_class.record_for_preprint(
                        preprint=node,
                        user=user,
                        version=fileversion.identifier if fileversion else None,
                        path=path
                    )
                except es_exceptions.ConnectionError:
                    log_exception()

    return {'payload': jwe.encrypt(jwt.encode({
        'exp': timezone.now() + datetime.timedelta(seconds=settings.WATERBUTLER_JWT_EXPIRATION),
        'data': {
            'auth': make_auth(auth.user),  # A waterbutler auth dict not an Auth object
            'credentials': credentials,
            'settings': waterbutler_settings,
            'callback_url': node.api_url_for(
                ('create_waterbutler_log' if not getattr(node, 'is_registration', False) else 'registration_callbacks'),
                _absolute=True,
                _internal=True
            )
        }
    }, settings.WATERBUTLER_JWT_SECRET, algorithm=settings.WATERBUTLER_JWT_ALGORITHM), WATERBUTLER_JWE_KEY)}
Code Example #50
    def load_resource(self, context, view):
        return Preprint.load(context[view.preprint_lookup_url_kwarg])