Example #1
def activity():
    """Reads node activity from pre-generated popular projects and registrations.
    New and Noteworthy projects are set manually or through `scripts/populate_new_and_noteworthy_projects.py`
    Popular projects and registrations are generated by `scripts/populate_popular_projects_and_registrations.py`
    """
    # Prevent circular import
    from osf.models import AbstractNode as Node

    # New and Noteworthy Projects
    try:
        new_and_noteworthy_projects = Node.load(settings.NEW_AND_NOTEWORTHY_LINKS_NODE).nodes_pointer
    except AttributeError:
        new_and_noteworthy_projects = []

    # Popular Projects
    try:
        popular_public_projects = Node.load(settings.POPULAR_LINKS_NODE).nodes_pointer
    except AttributeError:
        popular_public_projects = []

    # Popular Registrations
    try:
        popular_public_registrations = Node.load(settings.POPULAR_LINKS_REGISTRATIONS).nodes_pointer
    except AttributeError:
        popular_public_registrations = []

    return {
        'new_and_noteworthy_projects': new_and_noteworthy_projects,
        'recent_public_registrations': utils.recent_public_registrations(),
        'popular_public_projects': popular_public_projects,
        'popular_public_registrations': popular_public_registrations,
    }
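
A quick aside on the fallback pattern used above: `Node.load` returns None for an unknown or unset GUID, so the `.nodes_pointer` attribute access raises AttributeError and each list quietly falls back to empty. A minimal, generic sketch of that pattern (the names here are illustrative, not part of the OSF API):

def load_pointers(load, guid):
    """Return the node links for `guid`, or [] when the GUID is unset or unknown.

    `load` is any callable that returns either None or an object exposing
    `.nodes_pointer`, mirroring Node.load above.
    """
    try:
        return load(guid).nodes_pointer
    except AttributeError:
        # load() returned None, or the object has no node links configured
        return []
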
def main(dry_run=True):
    init_app(routes=False)
    from osf.models import AbstractNode
    from website.project.utils import activity

    popular_activity = activity()

    popular_nodes = popular_activity['popular_public_projects']
    popular_links_node = AbstractNode.find_one(Q('_id', 'eq', POPULAR_LINKS_NODE))
    popular_registrations = popular_activity['popular_public_registrations']
    popular_links_registrations = AbstractNode.find_one(Q('_id', 'eq', POPULAR_LINKS_REGISTRATIONS))

    update_node_links(popular_links_node, popular_nodes, 'popular')
    update_node_links(popular_links_registrations, popular_registrations, 'popular registrations')
    try:
        popular_links_node.save()
        logger.info('Node links on {} updated.'.format(popular_links_node._id))
    except (KeyError, RuntimeError) as error:
        logger.error('Could not migrate popular nodes due to error')
        logger.exception(error)

    try:
        popular_links_registrations.save()
        logger.info('Node links for registrations on {} updated.'.format(popular_links_registrations._id))
    except (KeyError, RuntimeError) as error:
        logger.error('Could not migrate popular nodes for registrations due to error')
        logger.exception(error)

    if dry_run:
        raise RuntimeError('Dry run -- transaction rolled back.')
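
The dry-run guard only works if the caller opens a database transaction around `main`, so the saves above can be rolled back when the RuntimeError is raised. A hedged sketch of one common wrapper for such a script (the `__main__` block and `--dry` flag are assumptions, not shown in the example):

import sys

from django.db import transaction

if __name__ == '__main__':
    dry_run = '--dry' in sys.argv
    with transaction.atomic():
        # main() raises RuntimeError when dry_run is True, which aborts
        # the atomic block and rolls back every save made above.
        main(dry_run=dry_run)
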
Example #3
def migrate_nodes(index, query=None):
    logger.info('Migrating nodes to index: {}'.format(index))
    node_query = Q('is_public', 'eq', True) & Q('is_deleted', 'eq', False)
    if query:
        node_query = query & node_query
    total = Node.find(node_query).count()
    increment = 200
    total_pages = (total // increment) + 1
    pages = paginated(Node, query=node_query, increment=increment, each=False, include=['contributor__user__guids'])

    for page_number, page in enumerate(pages):
        logger.info('Updating page {} / {}'.format(page_number + 1, total_pages))
        Node.bulk_update_search(page, index=index)

    logger.info('Nodes migrated: {}'.format(total))
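
Note that `total_pages = (total // increment) + 1` overstates the page count by one whenever `total` is an exact multiple of `increment`; since the value is only used in the progress log, that is harmless here. Ceiling division keeps the count exact if it ever matters:

total = 400
increment = 200

# Floor division plus one: off by one when total divides evenly.
assert (total // increment) + 1 == 3

# Ceiling division via negation: exact page count.
assert -(-total // increment) == 2
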
Example #4
def compile_subscriptions(node, event_type, event=None, level=0):
    """Recurse through node and parents for subscriptions.

    :param node: current node
    :param event_type: Generally node_subscriptions_available
    :param event: A particular event, such as file_updated, that has specific file subscriptions
    :param level: How deep the recursion is
    :return: a dict of notification types with lists of users.
    """
    subscriptions = check_node(node, event_type)
    if event:
        subscriptions = check_node(
            node, event)  # Gets particular event subscriptions
        parent_subscriptions = compile_subscriptions(
            node, event_type, level=level + 1)  # get node and parent subs
    elif node.parent_id:
        parent_subscriptions = \
            compile_subscriptions(AbstractNode.load(node.parent_id), event_type, level=level + 1)
    else:
        parent_subscriptions = check_node(None, event_type)
    for notification_type in parent_subscriptions:
        p_sub_n = parent_subscriptions[notification_type]
        p_sub_n.extend(subscriptions[notification_type])
        for nt in subscriptions:
            if notification_type != nt:
                p_sub_n = list(set(p_sub_n).difference(set(subscriptions[nt])))
        if level == 0:
            p_sub_n, removed = utils.separate_users(node, p_sub_n)
        parent_subscriptions[notification_type] = p_sub_n
    return parent_subscriptions
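
The return value is a dict keyed by notification type, each mapping to a list of subscribed users. A sketch of the shape, assuming the usual OSF notification type names; the user values are placeholders:

# Hypothetical result of compile_subscriptions(node, 'file_updated'):
subscriptions = {
    'email_transactional': ['user_abc'],  # notified immediately
    'email_digest': ['user_def'],         # batched into a periodic digest
    'none': ['user_ghi'],                 # explicitly opted out
}

# The set-difference step above prefers the node-level subscription type
# over an inherited parent-level type when a user appears in both.
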
Example #5
 def has_object_permission(self, request, view, obj):
     if not isinstance(obj, AbstractNode):
         obj = AbstractNode.load(request.parser_context['kwargs'][view.node_lookup_url_kwarg])
     assert isinstance(obj, AbstractNode), 'obj must be an AbstractNode'
     if obj.is_registration:
         return request.method in permissions.SAFE_METHODS
     return True
Example #6
def fork_pointer(auth, node, **kwargs):
    """Fork a pointer. Raises BAD_REQUEST if pointer not provided, not found,
    or not present in `nodes`.

    :param Auth auth: Consolidated authorization
    :param Node node: node that owns the pointer (the pointer's parent)
    :return: Fork of the node that the node link (pointer) points to
    """
    NodeRelation = apps.get_model('osf.NodeRelation')

    linked_node_id = request.json.get('nodeId')
    linked_node = AbstractNode.load(linked_node_id)
    pointer = NodeRelation.objects.filter(child=linked_node,
                                          is_node_link=True,
                                          parent=node).first()

    if pointer is None:
        # TODO: Change this to 404?
        raise HTTPError(http.BAD_REQUEST)

    try:
        fork = node.fork_pointer(pointer, auth=auth, save=True)
    except ValueError:
        raise HTTPError(http.BAD_REQUEST)

    return {
        'data': {
            'node': serialize_node_summary(node=fork,
                                           auth=auth,
                                           show_path=False)
        }
    }, http.CREATED
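
The view reads the node link to fork from the JSON body, so a caller only supplies the GUID of the linked node. A hedged sketch of exercising it with a webtest-style client (the `app` and `user` fixtures, the route name passed to `api_url_for`, and the GUID are all illustrative assumptions):

# 'abc12' stands in for the GUID of a node that `node` links to.
res = app.post_json(
    node.api_url_for('fork_pointer'),
    {'nodeId': 'abc12'},
    auth=user.auth,
)
assert res.status_code == 201  # http.CREATED
forked_summary = res.json['data']['node']
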
Example #7
    def has_object_permission(self, request, view, obj):
        assert isinstance(obj, dict)
        auth = get_user_auth(request)
        parent_node = obj['self']

        if request.method in permissions.SAFE_METHODS:
            return parent_node.can_view(auth)
        elif request.method == 'DELETE':
            return parent_node.can_edit(auth)
        else:
            has_parent_auth = parent_node.can_edit(auth)
            if not has_parent_auth:
                return False
            pointer_nodes = []
            for pointer in request.data.get('data', []):
                node = AbstractNode.load(pointer['id'])
                if not node or node.is_collection:
                    raise exceptions.NotFound(
                        detail='Node with id "{}" was not found'.format(
                            pointer['id']))
                pointer_nodes.append(node)
            has_pointer_auth = True
            for pointer in pointer_nodes:
                if not pointer.can_view(auth):
                    has_pointer_auth = False
                    break
            return has_pointer_auth
    def test_bulk_creates_children_and_sanitizes_html_logged_in_owner(
            self, app, user, project, url):
        title = '<em>Reasoning</em> <strong>Aboot Projects</strong>'
        description = 'A <script>alert("super reasonable")</script> child'

        res = app.post_json_api(url, {
            'data': [{
                'type': 'nodes',
                'attributes': {
                    'title': title,
                    'description': description,
                    'category': 'project',
                    'public': True
                }
            }]
        },
                                auth=user.auth,
                                bulk=True)
        child_id = res.json['data'][0]['id']
        assert res.status_code == 201
        url = '/{}nodes/{}/'.format(API_BASE, child_id)

        res = app.get(url, auth=user.auth)
        assert res.json['data']['attributes']['title'] == strip_html(title)
        assert res.json['data']['attributes']['description'] == strip_html(
            description)
        assert res.json['data']['attributes']['category'] == 'project'

        project.reload()
        child_id = res.json['data']['id']
        assert child_id == project.nodes[0]._id
        assert AbstractNode.load(
            child_id).logs.latest().action == NodeLog.PROJECT_CREATED
def _add_related_claimed_tag_to_user(pid, user):
    """
    Adds claimed tag to incoming users, depending on whether the resource has related source tags
    :param pid: guid of either the node or the preprint
    :param user: the claiming user
    """
    node = AbstractNode.load(pid)
    preprint = Preprint.load(pid)
    osf_claimed_tag, created = Tag.all_tags.get_or_create(
        name=provider_claimed_tag('osf'), system=True)
    if node:
        node_source_tags = node.all_tags.filter(name__icontains='source:',
                                                system=True)
        if node_source_tags.exists():
            for tag in node_source_tags:
                claimed_tag, created = Tag.all_tags.get_or_create(
                    name=NODE_SOURCE_TAG_CLAIMED_TAG_RELATION[tag.name],
                    system=True)
                user.add_system_tag(claimed_tag)
        else:
            user.add_system_tag(osf_claimed_tag)
    elif preprint:
        provider_id = preprint.provider._id
        preprint_claimed_tag, created = Tag.all_tags.get_or_create(
            name=provider_claimed_tag(provider_id, 'preprint'), system=True)
        user.add_system_tag(preprint_claimed_tag)
Example #10
def get_settings_url(uid, user):
    if uid == user._id:
        return web_url_for('user_notifications', _absolute=True)

    node = AbstractNode.load(uid)
    assert node, 'get_settings_url received an invalid Node id'
    return node.web_url_for('node_setting', _guid=True, _absolute=True)
Example #11
    def has_object_permission(self, request, view, obj):
        assert isinstance(obj, dict)
        auth = get_user_auth(request)
        collection = obj['self']
        has_collection_auth = auth.user and auth.user.has_perm('write_collection', collection)

        if request.method in permissions.SAFE_METHODS:
            if collection.is_public:
                return True
        elif request.method == 'DELETE':
            return has_collection_auth

        if not has_collection_auth:
            return False
        pointer_nodes = []
        for pointer in request.data.get('data', []):
            node = AbstractNode.load(pointer['id'])
            if not node:
                raise NotFound(detail='Node with id "{}" was not found'.format(pointer['id']))
            pointer_nodes.append(node)
        has_pointer_auth = True
        # TODO: is this necessary? get_object checks can_view
        for pointer in pointer_nodes:
            if not pointer.can_view(auth):
                has_pointer_auth = False
                break
        return has_pointer_auth
Example #12
 def setUp(self, *args, **kwargs):
     OsfTestCase.setUp(self, *args, **kwargs)
     if not self.kind:
         return
     self.sanction = self.Factory()
     self.reg = AbstractNode.find_one(Q(self.Model.SHORT_NAME, 'eq', self.sanction))
     self.user = self.reg.creator
def find_registration_file(value, node):
    """
    some annotations:

    - `value` is the `extra` from a file upload in `registered_meta`
        (see `Uploader.addFile` in website/static/js/registrationEditorExtensions.js)
    - `node` is a Registration instance
    - returns a `(file_info, node_id)` or `(None, None)` tuple, where `file_info` is from waterbutler's api
        (see `addons.base.models.BaseStorageAddon._get_fileobj_child_metadata` and `waterbutler.core.metadata.BaseMetadata`)
    """
    from osf.models import AbstractNode
    orig_sha256 = value['sha256']
    orig_name = unescape_entities(
        value['selectedFileName'],
        safe={
            '&lt;': '<',
            '&gt;': '>'
        }
    )
    orig_node = value['nodeId']
    file_map = get_file_map(node)
    for sha256, file_info, node_id in file_map:
        registered_from_id = AbstractNode.load(node_id).registered_from._id
        if sha256 == orig_sha256 and registered_from_id == orig_node and orig_name == file_info['name']:
            return file_info, node_id
    return None, None
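
For reference, the `value` argument only needs the three keys read above. A minimal illustrative shape (all values are placeholders):

value = {
    'sha256': 'e3b0c44298fc1c149afbf4c8996fb924...',  # hash recorded at upload time
    'selectedFileName': 'analysis_plan.pdf',          # may contain HTML-escaped < and >
    'nodeId': 'abc12',                                # GUID of the node the file lived on
}

# Given some Registration instance `registration`, file_info is
# waterbutler-style metadata, or None when no match is found.
file_info, node_id = find_registration_file(value, registration)
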
Example #14
def get_settings_url(uid, user):
    if uid == user._id:
        return web_url_for('user_notifications', _absolute=True)

    node = AbstractNode.load(uid)
    assert node, 'get_settings_url received an invalid Node id'
    return node.web_url_for('node_setting', _guid=True, _absolute=True)
Example #15
def archive_success(dst_pk, job_pk):
    """Archiver's final callback. For the time being the use case for this task
    is to rewrite references to files selected in a registration schema (the Prereg
    Challenge being the first to expose this feature). The created references point
    to files on the registered_from Node (needed for previewing schema data), and
    must be re-associated with the corresponding files in the newly created registration.

    :param str dst_pk: primary key of registration Node

    note:: At first glance this task makes redundant calls to utils.get_file_map (which
    returns a generator yielding (<sha256>, <file_metadata>) pairs) on the dst Node. Two
    notes about utils.get_file_map: 1) this function memoizes previous results to reduce
    overhead and 2) this function returns a generator that lazily fetches the file metadata
    of child Nodes (it is possible for a selected file to belong to a child Node) using a
    non-recursive DFS. Combined, this allows for a relatively efficient implementation despite
    the seemingly redundant calls.
    """
    create_app_context()
    dst = Node.load(dst_pk)
    # The filePicker extension added with the Prereg Challenge registration schema
    # allows users to select files in OSFStorage as their response to some schema
    # questions. These files are references to files on the unregistered Node, and
    # consequently we must migrate those file paths after archiver has run. Using
    # sha256 hashes is a convenient way to identify files post-archival.
    for schema in dst.registered_schema.all():
        if schema.has_files:
            utils.migrate_file_metadata(dst, schema)
    job = ArchiveJob.load(job_pk)
    if not job.sent:
        job.sent = True
        job.save()
        dst.sanction.ask(dst.get_active_contributors_recursive(unique_users=True))
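
The note in the docstring describes `utils.get_file_map` as memoized and lazily evaluated: repeated calls for the same node reuse cached results, and child nodes are only visited as the generator is consumed. A generic illustration of that pattern (not the OSF implementation), assuming each node exposes an `id`, a `children` list, and a `files()` iterator, and yielding the same (sha256, file_metadata, node_id) triples consumed by find_registration_file earlier in this listing:

_file_map_cache = {}

def iter_file_map(node):
    """Yield (sha256, file_metadata, node_id) for `node` and its descendants.

    Per-node results are memoized, and the DFS over children is iterative
    and lazy, so callers that stop early never touch deeper nodes.
    """
    stack = [node]
    while stack:
        current = stack.pop()
        if current.id not in _file_map_cache:
            _file_map_cache[current.id] = [
                (f.sha256, f.metadata, current.id) for f in current.files()
            ]
        for entry in _file_map_cache[current.id]:
            yield entry
        stack.extend(current.children)
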
    def get_paginated_response(self, data):
        """Add number of unread comments to links.meta when viewing list of comments filtered by
        a target node, file or wiki page."""
        response = super(CommentPagination, self).get_paginated_response(data)
        response_dict = response.data
        kwargs = self.request.parser_context['kwargs'].copy()

        if self.request.query_params.get('related_counts', False):
            target_id = self.request.query_params.get('filter[target]', None)
            node_id = kwargs.get('node_id', None)
            node = AbstractNode.load(node_id)
            user = self.request.user
            if target_id and not user.is_anonymous and node.is_contributor_or_group_member(user):
                root_target = Guid.load(target_id)
                if root_target:
                    page = getattr(root_target.referent, 'root_target_page', None)
                    if page:
                        if not len(data):
                            unread = 0
                        else:
                            unread = Comment.find_n_unread(user=user, node=node, page=page, root_id=target_id)
                        if self.request.version < '2.1':
                            response_dict['links']['meta']['unread'] = unread
                        else:
                            response_dict['meta']['unread'] = unread
        return Response(response_dict)
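
The version gate only changes where the unread count is reported, not how it is computed. The two response layouts produced by the block above (field values are placeholders):

# API versions < 2.1 nest the count under links.meta:
legacy_response = {
    'data': ['...'],
    'links': {'meta': {'unread': 4}},
}

# API versions >= 2.1 report it in the top-level meta object:
current_response = {
    'data': ['...'],
    'meta': {'unread': 4},
}
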
Example #17
def update_node_async(self, node_id, index=None, bulk=False):
    AbstractNode = apps.get_model('osf.AbstractNode')
    node = AbstractNode.load(node_id)
    try:
        update_node(node=node, index=index, bulk=bulk, async=True)
    except Exception as exc:
        self.retry(exc=exc)
Example #18
def fork_pointer(auth, node, **kwargs):
    """Fork a pointer. Raises BAD_REQUEST if pointer not provided, not found,
    or not present in `nodes`.

    :param Auth auth: Consolidated authorization
    :param Node node: node that owns the pointer (the pointer's parent)
    :return: Fork of the node that the node link (pointer) points to
    """
    NodeRelation = apps.get_model('osf.NodeRelation')

    linked_node_id = request.json.get('nodeId')
    linked_node = AbstractNode.load(linked_node_id)
    pointer = NodeRelation.objects.filter(child=linked_node, is_node_link=True, parent=node).first()

    if pointer is None:
        # TODO: Change this to 404?
        raise HTTPError(http.BAD_REQUEST)

    try:
        fork = node.fork_pointer(pointer, auth=auth, save=True)
    except ValueError:
        raise HTTPError(http.BAD_REQUEST)

    return {
        'data': {
            'node': serialize_node_summary(node=fork, auth=auth, show_path=False)
        }
    }, http.CREATED
Example #19
 def setUp(self, *args, **kwargs):
     OsfTestCase.setUp(self, *args, **kwargs)
     if not self.kind:
         return
     self.sanction = self.Factory()
     self.reg = Node.find_one(Q(self.Model.SHORT_NAME, 'eq', self.sanction))
     self.user = self.reg.creator
Example #20
def _send_global_and_node_emails(send_type):
    """
    Called by `send_users_email`. Send all global and node-related notification emails.
    """
    grouped_emails = get_users_emails(send_type)
    for group in grouped_emails:
        user = OSFUser.load(group['user_id'])
        if not user:
            log_exception()
            continue
        info = group['info']
        notification_ids = [message['_id'] for message in info]
        sorted_messages = group_by_node(info)
        if sorted_messages:
            if not user.is_disabled:
                # If there's only one node in the digest we can show its preferences link in the template.
                notification_nodes = list(sorted_messages['children'].keys())
                node = AbstractNode.load(notification_nodes[0]) if len(
                    notification_nodes) == 1 else None
                mails.send_mail(
                    to_addr=user.username,
                    can_change_node_preferences=bool(node),
                    node=node,
                    mail=mails.DIGEST,
                    name=user.fullname,
                    message=sorted_messages,
                )
            remove_notifications(email_notification_ids=notification_ids)
Example #21
    def handle(self, *args, **options):
        guids = options.get('guids', [])
        flag = options.get('flag', False)

        for guid in guids:
            logger.info('Checking Node {}...'.format(guid))
            check_spam(AbstractNode.load(guid), flag=flag)
Example #22
def compile_subscriptions(node, event_type, event=None, level=0):
    """Recurse through node and parents for subscriptions.

    :param node: current node
    :param event_type: Generally node_subscriptions_available
    :param event: A particular event, such as file_updated, that has specific file subscriptions
    :param level: How deep the recursion is
    :return: a dict of notification types with lists of users.
    """
    subscriptions = check_node(node, event_type)
    if event:
        subscriptions = check_node(node, event)  # Gets particular event subscriptions
        parent_subscriptions = compile_subscriptions(node, event_type, level=level + 1)  # get node and parent subs
    elif getattr(node, 'parent_id', False):
        parent_subscriptions = \
            compile_subscriptions(AbstractNode.load(node.parent_id), event_type, level=level + 1)
    else:
        parent_subscriptions = check_node(None, event_type)
    for notification_type in parent_subscriptions:
        p_sub_n = parent_subscriptions[notification_type]
        p_sub_n.extend(subscriptions[notification_type])
        for nt in subscriptions:
            if notification_type != nt:
                p_sub_n = list(set(p_sub_n).difference(set(subscriptions[nt])))
        if level == 0:
            p_sub_n, removed = utils.separate_users(node, p_sub_n)
        parent_subscriptions[notification_type] = p_sub_n
    return parent_subscriptions
Example #23
    def handle(self, *args, **options):
        guids = options.get('guids', [])
        flag = options.get('flag', False)

        for guid in guids:
            logger.info('Checking Node {}...'.format(guid))
            check_spam(AbstractNode.load(guid), flag=flag)
Example #24
def archive_success(dst_pk, job_pk):
    """Archiver's final callback. For the time being the use case for this task
    is to rewrite references to files selected in a registration schema (the Prereg
    Challenge being the first to expose this feature). The created references point
    to files on the registered_from Node (needed for previewing schema data), and
    must be re-associated with the corresponding files in the newly created registration.

    :param str dst_pk: primary key of registration Node

    note:: At first glance this task makes redundant calls to utils.get_file_map (which
    returns a generator yielding (<sha256>, <file_metadata>) pairs) on the dst Node. Two
    notes about utils.get_file_map: 1) this function memoizes previous results to reduce
    overhead and 2) this function returns a generator that lazily fetches the file metadata
    of child Nodes (it is possible for a selected file to belong to a child Node) using a
    non-recursive DFS. Combined, this allows for a relatively efficient implementation despite
    the seemingly redundant calls.
    """
    create_app_context()
    dst = Node.load(dst_pk)
    # The filePicker extension added with the Prereg Challenge registration schema
    # allows users to select files in OSFStorage as their response to some schema
    # questions. These files are references to files on the unregistered Node, and
    # consequently we must migrate those file paths after archiver has run. Using
    # sha256 hashes is a convenient way to identify files post-archival.
    for schema in dst.registered_schema.all():
        if schema.has_files:
            utils.migrate_file_metadata(dst, schema)
    job = ArchiveJob.load(job_pk)
    if not job.sent:
        job.sent = True
        job.save()
        dst.sanction.ask(
            dst.get_active_contributors_recursive(unique_users=True))
    def test_bulk_creates_children_and_sanitizes_html_logged_in_owner(
            self, app, user, project, url):
        title = '<em>Reasoning</em> <strong>Aboot Projects</strong>'
        description = 'A <script>alert("super reasonable")</script> child'

        res = app.post_json_api(url, {
            'data': [{
                'type': 'nodes',
                'attributes': {
                    'title': title,
                    'description': description,
                    'category': 'project',
                    'public': True
                }
            }]
        }, auth=user.auth, bulk=True)
        child_id = res.json['data'][0]['id']
        assert res.status_code == 201
        url = '/{}nodes/{}/'.format(API_BASE, child_id)

        res = app.get(url, auth=user.auth)
        assert res.json['data']['attributes']['title'] == strip_html(title)
        assert res.json['data']['attributes']['description'] == strip_html(
            description)
        assert res.json['data']['attributes']['category'] == 'project'

        project.reload()
        child_id = res.json['data']['id']
        assert child_id == project.nodes[0]._id
        assert AbstractNode.load(child_id).logs.latest(
        ).action == NodeLog.PROJECT_CREATED
    def test_bulk_creates_children_child_logged_in_write_contributor(
            self, app, user, project, child_one, child_two, url):
        write_contrib = AuthUserFactory()
        project.add_contributor(write_contrib,
                                permissions=permissions.WRITE,
                                auth=Auth(user),
                                save=True)

        res = app.post_json_api(url, {'data': [child_one, child_two]},
                                auth=write_contrib.auth,
                                bulk=True)
        assert res.status_code == 201
        assert res.json['data'][0]['attributes']['title'] == child_one[
            'attributes']['title']
        assert res.json['data'][0]['attributes']['description'] == child_one[
            'attributes']['description']
        assert res.json['data'][0]['attributes']['category'] == child_one[
            'attributes']['category']
        assert res.json['data'][1]['attributes']['title'] == child_two[
            'attributes']['title']
        assert res.json['data'][1]['attributes']['description'] == child_two[
            'attributes']['description']
        assert res.json['data'][1]['attributes']['category'] == child_two[
            'attributes']['category']

        project.reload()
        child_id = res.json['data'][0]['id']
        child_two_id = res.json['data'][1]['id']
        nodes = project.nodes
        assert child_id == nodes[0]._id
        assert child_two_id == nodes[1]._id

        assert AbstractNode.load(
            child_id).logs.latest().action == NodeLog.PROJECT_CREATED
        assert nodes[1].logs.latest().action == NodeLog.PROJECT_CREATED
Example #27
def conference_submissions(**kwargs):
    """Return data for all OSF4M submissions.

    The total number of submissions for each meeting is calculated and cached
    in the Conference.num_submissions field.
    """
    conferences = Conference.find(Q('is_meeting', 'ne', False))
    #  TODO: Revisit this loop, there has to be a way to optimize it
    for conf in conferences:
        # For efficiency, we filter by tag first, then node
        # instead of doing a single Node query
        projects = set()

        tags = Tag.find(Q('system', 'eq', False) & Q('name', 'iexact', conf.endpoint.lower())).values_list('pk', flat=True)
        nodes = Node.find(
            Q('tags', 'in', tags) &
            Q('is_public', 'eq', True) &
            Q('is_deleted', 'ne', True)
        ).include('guids')
        projects.update(list(nodes))
        num_submissions = len(projects)
        # Cache the number of submissions
        conf.num_submissions = num_submissions
    bulk_update(conferences, update_fields=['num_submissions'])
    return {'success': True}
Example #28
    def has_object_permission(self, request, view, obj):
        assert isinstance(obj, dict)
        auth = get_user_auth(request)
        collection = obj['self']
        has_collection_auth = auth.user and auth.user.has_perm(
            'write_collection', collection)

        if request.method in permissions.SAFE_METHODS:
            if collection.is_public:
                return True
        elif request.method == 'DELETE':
            return has_collection_auth

        if not has_collection_auth:
            return False
        pointer_objects = []
        for pointer in request.data.get('data', []):
            obj = AbstractNode.load(pointer['id']) or Preprint.load(
                pointer['id'])
            if not obj:
                raise NotFound(detail='Node with id "{}" was not found'.format(
                    pointer['id']))
            pointer_objects.append(obj)
        has_pointer_auth = True
        # TODO: is this necessary? get_object checks can_view
        for pointer in pointer_objects:
            if not pointer.can_view(auth):
                has_pointer_auth = False
                break
        return has_pointer_auth
Example #29
    def create(self, validated_data):
        inst = self.context['view'].get_object()['self']
        user = self.context['request'].user
        node_dicts = validated_data['data']

        changes_flag = False
        for node_dict in node_dicts:
            node = AbstractNode.load(node_dict['_id'])
            if not node:
                raise exceptions.NotFound(
                    detail='AbstractNode with id "{}" was not found'.format(
                        node_dict['_id']))
            if not node.has_permission(user, osf_permissions.WRITE):
                raise exceptions.PermissionDenied(
                    detail='Write permission on node {} required'.format(
                        node_dict['_id']))
            if not node.is_affiliated_with_institution(inst):
                node.add_affiliated_institution(inst, user, save=True)
                changes_flag = True

        if not changes_flag:
            raise RelationshipPostMakesNoChanges

        ConcreteNode = apps.get_model('osf.Node')
        return {
            'data': list(ConcreteNode.find_by_institutions(inst, Q('is_deleted', 'ne', True))),
            'self': inst,
        }
Example #30
def conference_submissions(**kwargs):
    """Return data for all OSF4M submissions.

    The total number of submissions for each meeting is calculated and cached
    in the Conference.num_submissions field.
    """
    conferences = Conference.find(Q('is_meeting', 'ne', False))
    #  TODO: Revisit this loop, there has to be a way to optimize it
    for conf in conferences:
        # For efficiency, we filter by tag first, then node
        # instead of doing a single Node query
        projects = set()

        tags = Tag.find(
            Q('system', 'eq', False)
            & Q('name', 'iexact', conf.endpoint.lower())).values_list(
                'pk', flat=True)
        nodes = AbstractNode.find(
            Q('tags', 'in', tags) & Q('is_public', 'eq', True)
            & Q('is_deleted', 'ne', True)).include('guids')
        projects.update(list(nodes))
        num_submissions = len(projects)
        # Cache the number of submissions
        conf.num_submissions = num_submissions
    bulk_update(conferences, update_fields=['num_submissions'])
    return {'success': True}
    def test_bulk_creates_children_child_logged_in_write_contributor(
            self, app, user, project, child_one, child_two, url):
        write_contrib = AuthUserFactory()
        project.add_contributor(
            write_contrib,
            permissions=[
                permissions.READ,
                permissions.WRITE],
            auth=Auth(user),
            save=True)

        res = app.post_json_api(
            url,
            {'data': [child_one, child_two]},
            auth=write_contrib.auth, bulk=True)
        assert res.status_code == 201
        assert res.json['data'][0]['attributes']['title'] == child_one['attributes']['title']
        assert res.json['data'][0]['attributes']['description'] == child_one['attributes']['description']
        assert res.json['data'][0]['attributes']['category'] == child_one['attributes']['category']
        assert res.json['data'][1]['attributes']['title'] == child_two['attributes']['title']
        assert res.json['data'][1]['attributes']['description'] == child_two['attributes']['description']
        assert res.json['data'][1]['attributes']['category'] == child_two['attributes']['category']

        project.reload()
        child_id = res.json['data'][0]['id']
        child_two_id = res.json['data'][1]['id']
        nodes = project.nodes
        assert child_id == nodes[0]._id
        assert child_two_id == nodes[1]._id

        assert AbstractNode.load(child_id).logs.latest(
        ).action == NodeLog.PROJECT_CREATED
        assert nodes[1].logs.latest().action == NodeLog.PROJECT_CREATED
Example #32
    def get_paginated_response(self, data):
        """Add number of unread comments to links.meta when viewing list of comments filtered by
        a target node, file or wiki page."""
        response = super(CommentPagination, self).get_paginated_response(data)
        response_dict = response.data
        kwargs = self.request.parser_context['kwargs'].copy()

        if self.request.query_params.get('related_counts', False):
            target_id = self.request.query_params.get('filter[target]', None)
            node_id = kwargs.get('node_id', None)
            node = Node.load(node_id)
            user = self.request.user
            if target_id and not user.is_anonymous and node.is_contributor(user):
                root_target = Guid.load(target_id)
                if root_target:
                    page = getattr(root_target.referent, 'root_target_page', None)
                    if page:
                        if not len(data):
                            unread = 0
                        else:
                            unread = Comment.find_n_unread(user=user, node=node, page=page, root_id=target_id)
                        if self.request.version < '2.1':
                            response_dict['links']['meta']['unread'] = unread
                        else:
                            response_dict['meta']['unread'] = unread
        return Response(response_dict)
Example #33
def _send_global_and_node_emails(send_type):
    """
    Called by `send_users_email`. Send all global and node-related notification emails.
    """
    grouped_emails = get_users_emails(send_type)
    for group in grouped_emails:
        user = OSFUser.load(group['user_id'])
        if not user:
            log_exception()
            continue
        info = group['info']
        notification_ids = [message['_id'] for message in info]
        sorted_messages = group_by_node(info)
        if sorted_messages:
            if not user.is_disabled:
                # If there's only one node in the digest we can show its preferences link in the template.
                notification_nodes = list(sorted_messages['children'].keys())
                node = AbstractNode.load(notification_nodes[0]) if len(
                    notification_nodes) == 1 else None
                mails.send_mail(
                    to_addr=user.username,
                    mimetype='html',
                    can_change_node_preferences=bool(node),
                    node=node,
                    mail=mails.DIGEST,
                    name=user.fullname,
                    message=sorted_messages,
                )
            remove_notifications(email_notification_ids=notification_ids)
Example #34
    def has_object_permission(self, request, view, obj):
        assert isinstance(obj, dict)
        auth = get_user_auth(request)
        parent_node = obj['self']

        if request.method in permissions.SAFE_METHODS:
            return parent_node.can_view(auth)
        elif request.method == 'DELETE':
            return parent_node.can_edit(auth)
        else:
            has_parent_auth = parent_node.can_edit(auth)
            if not has_parent_auth:
                return False
            pointer_nodes = []
            for pointer in request.data.get('data', []):
                node = AbstractNode.load(pointer['id'])
                if not node or node.is_collection:
                    raise exceptions.NotFound(detail='Node with id "{}" was not found'.format(pointer['id']))
                pointer_nodes.append(node)
            has_pointer_auth = True
            for pointer in pointer_nodes:
                if not pointer.can_view(auth):
                    has_pointer_auth = False
                    break
            return has_pointer_auth
Example #35
def send_users_email(send_type):
    """Find pending Emails and amalgamates them into a single Email.

    :param send_type
    :return:
    """
    grouped_emails = get_users_emails(send_type)
    for group in grouped_emails:
        user = OSFUser.load(group['user_id'])
        if not user:
            log_exception()
            continue
        info = group['info']
        notification_ids = [message['_id'] for message in info]
        sorted_messages = group_by_node(info)
        if sorted_messages:
            if not user.is_disabled:
                # If there's only one node in the digest we can show its preferences link in the template.
                notification_nodes = list(sorted_messages['children'].keys())
                node = AbstractNode.load(notification_nodes[0]) if len(
                    notification_nodes) == 1 else None
                mails.send_mail(
                    to_addr=user.username,
                    mimetype='html',
                    can_change_node_preferences=bool(node),
                    node=node,
                    mail=mails.DIGEST,
                    name=user.fullname,
                    message=sorted_messages,
                )
            remove_notifications(email_notification_ids=notification_ids)
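
The loop only relies on a few fields of each group returned by `get_users_emails`. A sketch of that structure, inferred from the lookups above (values are placeholders, and fields consumed only by `group_by_node` are omitted):

group = {
    'user_id': 'user_abc',
    'info': [
        {'_id': 'notification_1'},
        {'_id': 'notification_2'},
    ],
}

notification_ids = [message['_id'] for message in group['info']]
assert notification_ids == ['notification_1', 'notification_2']
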
Example #36
def add_pointer(auth):
    """Add a single pointer to a node using only JSON parameters

    """
    to_node_id = request.json.get('toNodeID')
    pointer_to_move = request.json.get('pointerID')

    if not (to_node_id and pointer_to_move):
        raise HTTPError(http.BAD_REQUEST)

    pointer = Node.load(pointer_to_move)
    to_node = Node.load(to_node_id)
    try:
        _add_pointers(to_node, [pointer], auth)
    except ValueError:
        raise HTTPError(http.BAD_REQUEST)
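
Both identifiers come straight out of the JSON body, so a request needs exactly two keys. An illustrative payload (the GUIDs are placeholders):

payload = {
    'toNodeID': 'abc12',   # node that will receive the pointer
    'pointerID': 'def34',  # node being linked
}
# Omitting either key makes `not (to_node_id and pointer_to_move)` true,
# so the view responds with HTTP 400 before touching the database.
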
Example #37
 def has_object_permission(self, request, view, obj):
     if not isinstance(obj, AbstractNode):
         obj = AbstractNode.load(request.parser_context['kwargs'][view.node_lookup_url_kwarg])
     assert isinstance(obj, AbstractNode), 'obj must be an AbstractNode'
     if obj.is_registration:
         return request.method in permissions.SAFE_METHODS
     return True
Example #38
def update_node_async(self, node_id, index=None, bulk=False):
    AbstractNode = apps.get_model('osf.AbstractNode')
    node = AbstractNode.load(node_id)
    try:
        update_node(node=node, index=index, bulk=bulk, async=True)
    except Exception as exc:
        self.retry(exc=exc)
Example #39
def add_pointer(auth):
    """Add a single pointer to a node using only JSON parameters

    """
    to_node_id = request.json.get('toNodeID')
    pointer_to_move = request.json.get('pointerID')

    if not (to_node_id and pointer_to_move):
        raise HTTPError(http.BAD_REQUEST)

    pointer = AbstractNode.load(pointer_to_move)
    to_node = AbstractNode.load(to_node_id)
    try:
        _add_pointers(to_node, [pointer], auth)
    except ValueError:
        raise HTTPError(http.BAD_REQUEST)
Example #40
def get_enabled_authorized_linked(user_settings_list, has_external_account, short_name):
    """ Gather the number of users who have at least one node in each of the stages for an addon

    :param user_settings_list: list of user_settings for a particular addon
    :param has_external_account: whether the addon derives auth from an external account; determines how node settings are loaded
    :param short_name: short name of addon to get the correct node_settings
    :return: dict with the number of users that have at least one project at each stage
    """
    from addons.forward.models import NodeSettings as ForwardNodeSettings

    num_enabled = 0  # of users w/ 1+ addon account connected
    num_authorized = 0  # of users w/ 1+ addon account connected to 1+ node
    num_linked = 0  # of users w/ 1+ addon account connected to 1+ node and configured

    # osfstorage and wiki don't have user_settings, so always assume they're enabled, authorized, linked
    if short_name == 'osfstorage' or short_name == 'wiki':
        num_enabled = num_authorized = num_linked = OSFUser.objects.filter(
            is_registered=True,
            password__isnull=False,
            merged_by__isnull=True,
            date_disabled__isnull=True,
            date_confirmed__isnull=False
        ).count()

    elif short_name == 'forward':
        num_enabled = num_authorized = ForwardNodeSettings.objects.count()
        num_linked = ForwardNodeSettings.objects.filter(url__isnull=False).count()

    else:
        for user_settings in paginated(user_settings_list):
            node_settings_list = []
            if has_external_account:
                if user_settings.has_auth:
                    num_enabled += 1
                    node_settings_list = [AbstractNode.load(guid).get_addon(short_name) for guid in user_settings.oauth_grants.keys()]
            else:
                num_enabled += 1
                node_settings_list = [AbstractNode.load(guid).get_addon(short_name) for guid in user_settings.nodes_authorized]
            if any([ns.has_auth for ns in node_settings_list if ns]):
                num_authorized += 1
                if any([(ns.complete and ns.configured) for ns in node_settings_list if ns]):
                    num_linked += 1
    return {
        'enabled': num_enabled,
        'authorized': num_authorized,
        'linked': num_linked
    }
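
The three counters can only shrink as the checks get stricter, which makes the result easy to sanity-check. A hedged usage sketch, assuming a user-settings queryset for a hypothetical add-on (the model and short name are illustrative):

counts = get_enabled_authorized_linked(
    user_settings_list=SomeAddonUserSettings.objects.all(),  # hypothetical model
    has_external_account=True,
    short_name='someaddon',
)

# enabled >= authorized >= linked follows from the structure of the loop above.
assert counts['enabled'] >= counts['authorized'] >= counts['linked']
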
Example #41
def get_enabled_authorized_linked(user_settings_list, has_external_account, short_name):
    """ Gather the number of users who have at least one node in each of the stages for an addon

    :param user_settings_list: list of user_settings for a particular addon
    :param has_external_account: whether the addon derives auth from an external account; determines how node settings are loaded
    :param short_name: short name of addon to get the correct node_settings
    :return: dict with the number of users that have at least one project at each stage
    """
    from addons.forward.models import NodeSettings as ForwardNodeSettings

    num_enabled = 0  # of users w/ 1+ addon account connected
    num_authorized = 0  # of users w/ 1+ addon account connected to 1+ node
    num_linked = 0  # of users w/ 1+ addon account connected to 1+ node and configured

    # osfstorage and wiki don't have user_settings, so always assume they're enabled, authorized, linked
    if short_name == 'osfstorage' or short_name == 'wiki':
        num_enabled = num_authorized = num_linked = OSFUser.find(
            Q('is_registered', 'eq', True) &
            Q('password', 'ne', None) &
            Q('merged_by', 'eq', None) &
            Q('date_disabled', 'eq', None) &
            Q('date_confirmed', 'ne', None)
        ).count()

    elif short_name == 'forward':
        num_enabled = num_authorized = ForwardNodeSettings.find().count()
        num_linked = ForwardNodeSettings.find(Q('url', 'ne', None)).count()

    else:
        for user_settings in paginated(user_settings_list):
            node_settings_list = []
            if has_external_account:
                if user_settings.has_auth:
                    num_enabled += 1
                    node_settings_list = [AbstractNode.load(guid).get_addon(short_name) for guid in user_settings.oauth_grants.keys()]
            else:
                num_enabled += 1
                node_settings_list = [AbstractNode.load(guid).get_addon(short_name) for guid in user_settings.nodes_authorized]
            if any([ns.has_auth for ns in node_settings_list if ns]):
                num_authorized += 1
                if any([(ns.complete and ns.configured) for ns in node_settings_list if ns]):
                    num_linked += 1
    return {
        'enabled': num_enabled,
        'authorized': num_authorized,
        'linked': num_linked
    }
Example #42
def sanction_handler(kind, action, payload, encoded_token, auth, **kwargs):
    from osf.models import (AbstractNode, Embargo, EmbargoTerminationApproval,
                            RegistrationApproval, Retraction)

    Model = {
        'registration': RegistrationApproval,
        'embargo': Embargo,
        'embargo_termination_approval': EmbargoTerminationApproval,
        'retraction': Retraction
    }.get(kind, None)
    if not Model:
        raise UnsupportedSanctionHandlerKind

    sanction_id = payload.get('sanction_id', None)
    sanction = Model.load(sanction_id)

    err_code = None
    err_message = None
    if not sanction:
        err_code = http.BAD_REQUEST
        err_message = 'There is no {0} associated with this token.'.format(
            markupsafe.escape(Model.DISPLAY_NAME))
    elif sanction.is_approved:
        # Simply strip query params and redirect if already approved
        return redirect(request.base_url)
    elif sanction.is_rejected:
        err_code = http.GONE if kind in ['registration', 'embargo'] else http.BAD_REQUEST
        err_message = 'This registration {0} has been rejected.'.format(
            markupsafe.escape(sanction.DISPLAY_NAME))
    if err_code:
        raise HTTPError(err_code, data=dict(message_long=err_message))

    do_action = getattr(sanction, action, None)
    if do_action:
        registration = AbstractNode.find_one(
            Q(sanction.SHORT_NAME, 'eq', sanction))
        registered_from = registration.registered_from
        try:
            do_action(auth.user, encoded_token)
        except TokenError as e:
            raise HTTPError(http.BAD_REQUEST,
                            data={
                                'message_short': e.message_short,
                                'message_long': e.message_long
                            })
        except PermissionsError as e:
            raise HTTPError(http.UNAUTHORIZED,
                            data={
                                'message_short': 'Unauthorized access',
                                'message_long': e.message
                            })
        sanction.save()
        return {
            'registration': registration_approval_handler,
            'embargo': embargo_handler,
            'embargo_termination_approval': embargo_termination_handler,
            'retraction': retraction_handler,
        }[kind](action, registration, registered_from)
Example #43
    def test_conference_submissions(self):
        Node.remove()
        conference1 = ConferenceFactory()
        conference2 = ConferenceFactory()
        # Create conference nodes
        create_fake_conference_nodes(
            3,
            conference1.endpoint,
        )
        create_fake_conference_nodes(
            2,
            conference2.endpoint,
        )

        url = api_url_for('conference_submissions')
        res = self.app.get(url)
        assert_equal(res.json['success'], True)
Example #44
    def test_conference_submissions(self):
        AbstractNode.remove()
        conference1 = ConferenceFactory()
        conference2 = ConferenceFactory()
        # Create conference nodes
        create_fake_conference_nodes(
            3,
            conference1.endpoint,
        )
        create_fake_conference_nodes(
            2,
            conference2.endpoint,
        )

        url = api_url_for('conference_submissions')
        res = self.app.get(url)
        assert_equal(res.json['success'], True)
Example #45
def search_node(auth, **kwargs):
    """

    """
    # Get arguments
    node = AbstractNode.load(request.json.get('nodeId'))
    include_public = request.json.get('includePublic')
    size = float(request.json.get('size', '5').strip())
    page = request.json.get('page', 0)
    query = request.json.get('query', '').strip()

    start = (page * size)
    if not query:
        return {'nodes': []}

    # Build ODM query
    title_query = Q('title', 'icontains', query)
    not_deleted_query = Q('is_deleted', 'eq', False)
    visibility_query = Q('contributors', 'eq', auth.user)
    if include_public:
        visibility_query = visibility_query | Q('is_public', 'eq', True)
    odm_query = title_query & not_deleted_query & visibility_query

    # Exclude current node from query if provided
    nin = [node.id] + list(node._nodes.values_list('pk',
                                                   flat=True)) if node else []

    nodes = AbstractNode.find(odm_query).exclude(id__in=nin).exclude(
        type='osf.collection')
    count = nodes.count()
    pages = math.ceil(count / size)
    validate_page_num(page, pages)

    return {
        'nodes': [
            _serialize_node_search(each)
            for each in islice(nodes, start, start + size) if each.contributors
        ],
        'total': count,
        'pages': pages,
        'page': page
    }
Example #46
 def on_delete(self):
     """When the user deactivates the addon, clear auth for connected nodes.
     """
     super(AddonOAuthUserSettingsBase, self).on_delete()
     nodes = [Node.load(node_id) for node_id in self.oauth_grants.keys()]
     for node in nodes:
         node_addon = node.get_addon(self.oauth_provider.short_name)
         if node_addon and node_addon.user_settings == self:
             node_addon.clear_auth()
Example #47
 def has_object_permission(self, request, view, obj):
     if isinstance(obj, Node):
         node = obj
     else:
         context = request.parser_context['kwargs']
         node = AbstractNode.load(context[view.node_lookup_url_kwarg])
     if node.is_retracted:
         return False
     return True
Example #48
 def test_integration(self, mock_upload, mock_send_mail):
     fullname = 'John Deacon'
     username = '******'
     title = 'good songs'
     conference = ConferenceFactory()
     body = 'dragon on my back'
     content = 'dragon attack'
     recipient = '{0}{1}[email protected]'.format(
         'test-' if settings.DEV_MODE else '',
         conference.endpoint,
     )
     self.app.post(
         api_url_for('meeting_hook'),
         {
             'X-Mailgun-Sscore': 0,
             'timestamp': '123',
             'token': 'secret',
             'signature': hmac.new(
                 key=settings.MAILGUN_API_KEY,
                 msg='{}{}'.format('123', 'secret'),
                 digestmod=hashlib.sha256,
             ).hexdigest(),
             'attachment-count': '1',
             'from': '{0} <{1}>'.format(fullname, username),
             'recipient': recipient,
             'subject': title,
             'stripped-text': body,
         },
         upload_files=[
             ('attachment-1', 'attachment-1', content),
         ],
     )
     assert_true(mock_upload.called)
     users = OSFUser.find(Q('username', 'eq', username))
     assert_equal(users.count(), 1)
     nodes = AbstractNode.find(Q('title', 'eq', title))
     assert_equal(nodes.count(), 1)
     node = nodes[0]
     assert_equal(node.get_wiki_page('home').content, body)
     assert_true(mock_send_mail.called)
     call_args, call_kwargs = mock_send_mail.call_args
     assert_absolute(call_kwargs['conf_view_url'])
     assert_absolute(call_kwargs['set_password_url'])
     assert_absolute(call_kwargs['profile_url'])
     assert_absolute(call_kwargs['file_url'])
     assert_absolute(call_kwargs['node_url'])
Example #49
 def has_object_permission(self, request, view, obj):
     if isinstance(obj, Node):
         node = obj
     else:
         context = request.parser_context['kwargs']
         node = AbstractNode.load(context[view.node_lookup_url_kwarg])
     if node.is_retracted:
         return False
     return True
Example #50
 def get_queryset(self):
     blacklisted = self.is_blacklisted()
     nodes = self.get_queryset_from_request().distinct()
     # If attempting to filter on a blacklisted field, exclude withdrawals.
     if blacklisted:
         non_withdrawn_list = [node._id for node in nodes if not node.is_retracted]
         non_withdrawn_nodes = Node.find(Q('_id', 'in', non_withdrawn_list))
         return non_withdrawn_nodes
     return nodes
Example #51
    def __init__(self, user, node, event, payload=None):
        super(ComplexFileEvent, self).__init__(user,
                                               node,
                                               event,
                                               payload=payload)

        self.source_node = AbstractNode.load(
            self.payload['source']['node']['_id'])
        self.addon = self.node.get_addon(
            self.payload['destination']['provider'])
Example #52
def main(dry=True):
    systagfile = sys.argv[1]
    with open(systagfile, 'r') as fp:
        systag_data = json.load(fp)
        for node_id, systags in systag_data.iteritems():
            node = AbstractNode.load(node_id)
            for systag in systags:
                logger.info('Adding {} as a system tag to AbstractNode {}'.format(systag, node._id))
                if not dry:
                    node.add_system_tag(systag, save=True)
Example #53
 def get_node_title(self, obj):
     user = self.context['request'].user
     node_title = obj['node']['title']
     node = AbstractNode.load(obj['node']['_id'])
     if not user.is_authenticated:
         if node.is_public:
             return node_title
     elif node.has_permission(user, osf_permissions.READ):
         return node_title
     return 'Private Component'
Example #54
def search_contributor(auth):
    user = auth.user if auth else None
    nid = request.args.get('excludeNode')
    exclude = AbstractNode.load(nid).contributors if nid else []
    # TODO: Determine whether bleach is appropriate for ES payload. Also, inconsistent with website.sanitize.util.strip_html
    query = bleach.clean(request.args.get('query', ''), tags=[], strip=True)
    page = int(bleach.clean(request.args.get('page', '0'), tags=[], strip=True))
    size = int(bleach.clean(request.args.get('size', '5'), tags=[], strip=True))
    return search.search_contributor(query=query, page=page, size=size,
                                     exclude=exclude, current_user=user)
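
The bleach calls above use an empty tag whitelist with strip=True, so markup is removed while the inner text is kept, which is what makes the values safe to pass along to search. A small self-contained illustration of that behaviour:

import bleach

# With an empty whitelist and strip=True, tags are dropped but text survives.
assert bleach.clean('<b>ada</b> lovelace', tags=[], strip=True) == 'ada lovelace'

# Plain numeric strings pass through unchanged, so the int() casts above succeed.
assert int(bleach.clean('5', tags=[], strip=True)) == 5
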
Example #55
def search_contributor(auth):
    user = auth.user if auth else None
    nid = request.args.get('excludeNode')
    exclude = AbstractNode.load(nid).contributors if nid else []
    # TODO: Determine whether bleach is appropriate for ES payload. Also, inconsistent with website.sanitize.util.strip_html
    query = bleach.clean(request.args.get('query', ''), tags=[], strip=True)
    page = int(bleach.clean(request.args.get('page', '0'), tags=[], strip=True))
    size = int(bleach.clean(request.args.get('size', '5'), tags=[], strip=True))
    return search.search_contributor(query=query, page=page, size=size,
                                     exclude=exclude, current_user=user)
Example #56
 def get_node_title(self, obj):
     user = self.context['request'].user
     node_title = obj['node']['title']
     node = AbstractNode.load(obj['node']['_id'])
     if not user.is_authenticated:
         if node.is_public:
             return node_title
     elif node.has_permission(user, osf_permissions.READ):
         return node_title
     return 'Private Component'
Example #57
def load_parent(parent_id):
    parent = AbstractNode.load(parent_id)
    if parent and parent.is_public:
        return {
            'title': parent.title,
            'url': parent.url,
            'id': parent._id,
            'is_registation': parent.is_registration,
        }
    return None
Example #58
def load_parent(parent_id):
    parent = AbstractNode.load(parent_id)
    if parent and parent.is_public:
        return {
            'title': parent.title,
            'url': parent.url,
            'id': parent._id,
            'is_registation': parent.is_registration,
        }
    return None
Example #59
    def has_object_permission(self, request, view, obj):
        # Preprints cannot be registrations
        if isinstance(obj, Preprint):
            return True

        if not isinstance(obj, AbstractNode):
            obj = AbstractNode.load(request.parser_context['kwargs'][view.node_lookup_url_kwarg])
        assert_resource_type(obj, self.acceptable_models)
        if obj.is_registration:
            return request.method in permissions.SAFE_METHODS
        return True
Example #60
def search_node(auth, **kwargs):
    """

    """
    # Get arguments
    node = Node.load(request.json.get('nodeId'))
    include_public = request.json.get('includePublic')
    size = float(request.json.get('size', '5').strip())
    page = request.json.get('page', 0)
    query = request.json.get('query', '').strip()

    start = (page * size)
    if not query:
        return {'nodes': []}

    # Build ODM query
    title_query = Q('title', 'icontains', query)
    not_deleted_query = Q('is_deleted', 'eq', False)
    visibility_query = Q('contributors', 'eq', auth.user)
    if include_public:
        visibility_query = visibility_query | Q('is_public', 'eq', True)
    odm_query = title_query & not_deleted_query & visibility_query

    # Exclude current node from query if provided
    nin = [node.id] + list(node._nodes.values_list('pk', flat=True)) if node else []

    nodes = Node.find(odm_query).exclude(id__in=nin).exclude(type='osf.collection')
    count = nodes.count()
    pages = math.ceil(count / size)
    validate_page_num(page, pages)

    return {
        'nodes': [
            _serialize_node_search(each)
            for each in islice(nodes, start, start + size)
            if each.contributors
        ],
        'total': count,
        'pages': pages,
        'page': page
    }