def activity():
    """Reads node activity from pre-generated popular projects and registrations.

    New and Noteworthy projects are set manually or through
    `scripts/populate_new_and_noteworthy_projects.py`.
    Popular projects and registrations are generated by
    `scripts/populate_popular_projects_and_registrations.py`.
    """
    # Prevent circular import
    from osf.models import AbstractNode as Node

    def _pointers(node_guid):
        # Node.load returns None for an unknown/unset guid, so accessing
        # .nodes_pointer raises AttributeError — treat that as "no links".
        try:
            return Node.load(node_guid).nodes_pointer
        except AttributeError:
            return []

    return {
        'new_and_noteworthy_projects': _pointers(settings.NEW_AND_NOTEWORTHY_LINKS_NODE),
        'recent_public_registrations': utils.recent_public_registrations(),
        'popular_public_projects': _pointers(settings.POPULAR_LINKS_NODE),
        'popular_public_registrations': _pointers(settings.POPULAR_LINKS_REGISTRATIONS),
    }
def update_node_async(self, node_id, index=None, bulk=False):
    """Celery task: load a node by guid and push it to the search index.

    :param str node_id: guid of the AbstractNode to index
    :param index: optional search index name (defaults handled by `update_node`)
    :param bool bulk: whether to use bulk indexing
    """
    AbstractNode = apps.get_model('osf.AbstractNode')
    node = AbstractNode.load(node_id)
    try:
        # BUG FIX: `async` became a reserved keyword in Python 3.7, so
        # `async=True` is a SyntaxError. The keyword argument was renamed
        # to `async_update` to match the corresponding rename in
        # `update_node` — confirm the callee accepts this name.
        update_node(node=node, index=index, bulk=bulk, async_update=True)
    except Exception as exc:
        # Retry the task on any failure (e.g. transient search-backend errors).
        self.retry(exc=exc)
def archive_success(dst_pk, job_pk):
    """Archiver's final callback. For the time being the use case for this task
    is to rewrite references to files selected in a registration schema (the
    Prereg Challenge being the first to expose this feature). The created
    references point to files on the registered_from Node (needed for
    previewing schema data), and must be re-associated with the corresponding
    files in the newly created registration.

    :param str dst_pk: primary key of registration Node

    note:: At first glance this task makes redundant calls to
    utils.get_file_map (which returns a generator yielding
    (<sha256>, <file_metadata>) pairs) on the dst Node. Two notes about
    utils.get_file_map: 1) this function memoizes previous results to reduce
    overhead and 2) this function returns a generator that lazily fetches the
    file metadata of child Nodes (it is possible for a selected file to belong
    to a child Node) using a non-recursive DFS. Combined this allows for a
    relatively efficient implementation with seemingly redundant calls.
    """
    create_app_context()
    dst = Node.load(dst_pk)

    # The filePicker extension added with the Prereg Challenge registration
    # schema lets users select OSFStorage files as answers to schema questions.
    # Those selections reference files on the unregistered Node, so the paths
    # must be migrated after archival; sha256 hashes identify files post-archive.
    for schema in dst.registered_schema.all():
        if schema.has_files:
            utils.migrate_file_metadata(dst, schema)

    job = ArchiveJob.load(job_pk)
    if not job.sent:
        job.sent = True
        job.save()
        # Kick off the sanction (approval) emails to all active contributors.
        dst.sanction.ask(dst.get_active_contributors_recursive(unique_users=True))
def test_bulk_creates_children_child_logged_in_write_contributor(
        self, app, user, project, child_one, child_two, url):
    # A write contributor should be allowed to bulk-create child nodes.
    write_contrib = AuthUserFactory()
    project.add_contributor(
        write_contrib,
        permissions=[permissions.READ, permissions.WRITE],
        auth=Auth(user),
        save=True)

    res = app.post_json_api(
        url,
        {'data': [child_one, child_two]},
        auth=write_contrib.auth,
        bulk=True)
    assert res.status_code == 201

    # Each created child echoes back the submitted attributes, in order.
    for i, payload in enumerate((child_one, child_two)):
        for attr in ('title', 'description', 'category'):
            assert res.json['data'][i]['attributes'][attr] == payload['attributes'][attr]

    project.reload()
    first_id = res.json['data'][0]['id']
    second_id = res.json['data'][1]['id']
    children = project.nodes
    assert first_id == children[0]._id
    assert second_id == children[1]._id

    # Both children log a PROJECT_CREATED action.
    assert AbstractNode.load(first_id).logs.latest().action == NodeLog.PROJECT_CREATED
    assert children[1].logs.latest().action == NodeLog.PROJECT_CREATED
def add_pointer(auth):
    """Add a single pointer to a node using only JSON parameters
    """
    to_node_id = request.json.get('toNodeID')
    pointer_to_move = request.json.get('pointerID')

    # Both ids are required; reject the request otherwise.
    if not (to_node_id and pointer_to_move):
        raise HTTPError(http.BAD_REQUEST)

    pointer = Node.load(pointer_to_move)
    to_node = Node.load(to_node_id)
    try:
        _add_pointers(to_node, [pointer], auth)
    except ValueError:
        # e.g. pointing a node at itself or a duplicate link
        raise HTTPError(http.BAD_REQUEST)
def compile_subscriptions(node, event_type, event=None, level=0):
    """Recurse through node and parents for subscriptions.

    :param node: current node
    :param event_type: Generally node_subscriptions_available
    :param event: Particular event such a file_updated that has specific file subs
    :param level: How deep the recursion is
    :return: a dict of notification types with lists of users.
    """
    subscriptions = check_node(node, event_type)
    if event:
        # Particular event subscriptions override the generic ones;
        # also gather node-and-parent subscriptions for the generic type.
        subscriptions = check_node(node, event)
        parent_subscriptions = compile_subscriptions(node, event_type, level=level + 1)
    elif getattr(node, 'parent_id', False):
        parent_subscriptions = \
            compile_subscriptions(AbstractNode.load(node.parent_id), event_type, level=level + 1)
    else:
        # No parent: start from an empty subscription skeleton.
        parent_subscriptions = check_node(None, event_type)

    for notification_type in parent_subscriptions:
        p_sub_n = parent_subscriptions[notification_type]
        p_sub_n.extend(subscriptions[notification_type])
        # A user subscribed under a different notification type at this node
        # wins over the inherited parent subscription — remove duplicates.
        for nt in subscriptions:
            if notification_type != nt:
                p_sub_n = list(set(p_sub_n).difference(set(subscriptions[nt])))
        if level == 0:
            # Only at the top of the recursion: split out users who cannot
            # actually see the node.
            p_sub_n, removed = utils.separate_users(node, p_sub_n)
        parent_subscriptions[notification_type] = p_sub_n
    return parent_subscriptions
def get_settings_url(uid, user):
    """Return the notification-settings URL for a guid.

    :param str uid: guid of either the user themselves or a node
    :param user: the user viewing the settings
    :return: absolute URL to the relevant settings page
    """
    if uid == user._id:
        return web_url_for('user_notifications', _absolute=True)

    node = AbstractNode.load(uid)
    # Typo fix in the assertion message: "recieved" -> "received".
    assert node, 'get_settings_url received an invalid Node id'
    return node.web_url_for('node_setting', _guid=True, _absolute=True)
def has_object_permission(self, request, view, obj):
    # Resolve the node from the URL kwargs when the view didn't hand us one.
    if not isinstance(obj, AbstractNode):
        obj = AbstractNode.load(request.parser_context['kwargs'][view.node_lookup_url_kwarg])
    assert isinstance(obj, AbstractNode), 'obj must be an Node'
    # Registrations are immutable through this endpoint: read-only methods only.
    if obj.is_registration:
        return request.method in permissions.SAFE_METHODS
    return True
def has_object_permission(self, request, view, obj):
    assert isinstance(obj, dict)
    auth = get_user_auth(request)
    parent_node = obj['self']

    # Reads require view access; deletes require edit access on the parent.
    if request.method in permissions.SAFE_METHODS:
        return parent_node.can_view(auth)
    if request.method == 'DELETE':
        return parent_node.can_edit(auth)

    # Creating/updating pointers: must be able to edit the parent AND
    # view every node being pointed at.
    if not parent_node.can_edit(auth):
        return False

    targets = []
    for pointer in request.data.get('data', []):
        target = AbstractNode.load(pointer['id'])
        if not target or target.is_collection:
            raise exceptions.NotFound(detail='Node with id "{}" was not found'.format(pointer['id']))
        targets.append(target)

    return all(target.can_view(auth) for target in targets)
def _send_global_and_node_emails(send_type):
    """
    Called by `send_users_email`. Send all global and node-related notification
    emails.

    :param send_type: digest frequency bucket passed to `get_users_emails`
    """
    grouped_emails = get_users_emails(send_type)
    for group in grouped_emails:
        user = OSFUser.load(group['user_id'])
        if not user:
            log_exception()
            continue
        info = group['info']
        notification_ids = [message['_id'] for message in info]
        sorted_messages = group_by_node(info)
        if sorted_messages:
            if not user.is_disabled:
                # If there's only one node in digest we can show it's preferences link in the template.
                # BUG FIX: dict.keys() returns a non-subscriptable view on
                # Python 3; materialize it before indexing.
                notification_nodes = list(sorted_messages['children'].keys())
                node = AbstractNode.load(notification_nodes[0]) if len(
                    notification_nodes) == 1 else None
                mails.send_mail(
                    to_addr=user.username,
                    mimetype='html',
                    can_change_node_preferences=bool(node),
                    node=node,
                    mail=mails.DIGEST,
                    name=user.fullname,
                    message=sorted_messages,
                )
        # Clear the queued notifications whether or not the mail was sent.
        remove_notifications(email_notification_ids=notification_ids)
def fork_pointer(auth, node, **kwargs):
    """Fork a pointer. Raises BAD_REQUEST if pointer not provided, not found,
    or not present in `nodes`.

    :param Auth auth: Consolidated authorization
    :param Node node: root from which pointer is child
    :return: Fork of node to which nodelink(pointer) points
    """
    NodeRelation = apps.get_model('osf.NodeRelation')

    linked_node_id = request.json.get('nodeId')
    linked_node = AbstractNode.load(linked_node_id)
    pointer = NodeRelation.objects.filter(
        child=linked_node, is_node_link=True, parent=node).first()
    if pointer is None:
        # TODO: Change this to 404?
        raise HTTPError(http.BAD_REQUEST)

    try:
        fork = node.fork_pointer(pointer, auth=auth, save=True)
    except ValueError:
        raise HTTPError(http.BAD_REQUEST)

    serialized = serialize_node_summary(node=fork, auth=auth, show_path=False)
    return {'data': {'node': serialized}}, http.CREATED
def get_paginated_response(self, data):
    """Add number of unread comments to links.meta when viewing list of
    comments filtered by a target node, file or wiki page."""
    response = super(CommentPagination, self).get_paginated_response(data)
    response_dict = response.data
    kwargs = self.request.parser_context['kwargs'].copy()

    if self.request.query_params.get('related_counts', False):
        target_id = self.request.query_params.get('filter[target]', None)
        node_id = kwargs.get('node_id', None)
        node = Node.load(node_id)
        user = self.request.user
        # Unread counts only make sense for authenticated contributors.
        if target_id and not user.is_anonymous and node.is_contributor(user):
            root_target = Guid.load(target_id)
            if root_target:
                page = getattr(root_target.referent, 'root_target_page', None)
                if page:
                    # An empty result page implies zero unread comments.
                    unread = 0 if not len(data) else Comment.find_n_unread(
                        user=user, node=node, page=page, root_id=target_id)
                    # API versions before 2.1 nested meta under links.
                    if self.request.version < '2.1':
                        response_dict['links']['meta']['unread'] = unread
                    else:
                        response_dict['meta']['unread'] = unread
    return Response(response_dict)
def has_object_permission(self, request, view, obj):
    assert isinstance(obj, dict)
    auth = get_user_auth(request)
    collection = obj['self']
    has_collection_auth = auth.user and auth.user.has_perm('write_collection', collection)

    # Public collections are readable by anyone.
    if request.method in permissions.SAFE_METHODS:
        if collection.is_public:
            return True
    elif request.method == 'DELETE':
        return has_collection_auth

    if not has_collection_auth:
        return False

    linked = []
    for pointer in request.data.get('data', []):
        target = AbstractNode.load(pointer['id'])
        if not target:
            raise NotFound(detail='Node with id "{}" was not found'.format(pointer['id']))
        linked.append(target)

    # TODO: is this necessary? get_object checks can_view
    return all(target.can_view(auth) for target in linked)
def test_bulk_creates_children_and_sanitizes_html_logged_in_owner(
        self, app, user, project, url):
    # Markup in title/description must be stripped server-side.
    title = '<em>Reasoning</em> <strong>Aboot Projects</strong>'
    description = 'A <script>alert("super reasonable")</script> child'

    payload = {
        'data': [{
            'type': 'nodes',
            'attributes': {
                'title': title,
                'description': description,
                'category': 'project',
                'public': True
            }
        }]
    }
    res = app.post_json_api(url, payload, auth=user.auth, bulk=True)
    child_id = res.json['data'][0]['id']
    assert res.status_code == 201

    # Fetch the created child and verify the sanitized attributes.
    url = '/{}nodes/{}/'.format(API_BASE, child_id)
    res = app.get(url, auth=user.auth)
    attributes = res.json['data']['attributes']
    assert attributes['title'] == strip_html(title)
    assert attributes['description'] == strip_html(description)
    assert attributes['category'] == 'project'

    project.reload()
    child_id = res.json['data']['id']
    assert child_id == project.nodes[0]._id
    assert AbstractNode.load(child_id).logs.latest().action == NodeLog.PROJECT_CREATED
def handle(self, *args, **options):
    # Management-command entry point: spam-check each node guid supplied
    # on the command line, optionally flagging it.
    guids = options.get('guids', [])
    flag = options.get('flag', False)
    for guid in guids:
        logger.info('Checking Node {}...'.format(guid))
        check_spam(AbstractNode.load(guid), flag=flag)
def get_enabled_authorized_linked(user_settings_list, has_external_account, short_name):
    """Gather the number of users who have at least one node in each of the
    stages for an addon.

    :param user_settings_list: list of user_settings for a particular addon
    :param has_external_account: where addon is derived from, determines method
        to load node settings
    :param short_name: short name of addon to get correct node_settings
    :return: dict with number of users that have at least one project at each stage
    """
    from addons.forward.models import NodeSettings as ForwardNodeSettings

    num_enabled = 0     # of users w/ 1+ addon account connected
    num_authorized = 0  # of users w/ 1+ addon account connected to 1+ node
    num_linked = 0      # of users w/ 1+ addon account connected to 1+ node and configured

    # osfstorage and wiki don't have user_settings, so always assume they're
    # enabled, authorized, linked
    if short_name in ('osfstorage', 'wiki'):
        num_enabled = num_authorized = num_linked = OSFUser.objects.filter(
            is_registered=True,
            password__isnull=False,
            merged_by__isnull=True,
            date_disabled__isnull=True,
            date_confirmed__isnull=False,
        ).count()
    elif short_name == 'forward':
        num_enabled = num_authorized = ForwardNodeSettings.objects.count()
        num_linked = ForwardNodeSettings.objects.filter(url__isnull=False).count()
    else:
        for user_settings in paginated(user_settings_list):
            node_settings_list = []
            if has_external_account:
                if user_settings.has_auth:
                    num_enabled += 1
                    node_settings_list = [
                        AbstractNode.load(guid).get_addon(short_name)
                        for guid in user_settings.oauth_grants.keys()
                    ]
            else:
                num_enabled += 1
                node_settings_list = [
                    AbstractNode.load(guid).get_addon(short_name)
                    for guid in user_settings.nodes_authorized
                ]
            # One authorized (resp. fully configured) node settings is enough
            # to count this user toward the stage.
            if any(ns.has_auth for ns in node_settings_list if ns):
                num_authorized += 1
                if any((ns.complete and ns.configured) for ns in node_settings_list if ns):
                    num_linked += 1

    return {
        'enabled': num_enabled,
        'authorized': num_authorized,
        'linked': num_linked,
    }
def on_delete(self):
    """When the user deactivates the addon, clear auth for connected nodes.
    """
    super(AddonOAuthUserSettingsBase, self).on_delete()
    for node_id in self.oauth_grants.keys():
        node = Node.load(node_id)
        node_addon = node.get_addon(self.oauth_provider.short_name)
        # Only clear settings that actually belong to this user.
        if node_addon and node_addon.user_settings == self:
            node_addon.clear_auth()
def has_object_permission(self, request, view, obj):
    # Accept either a Node instance or look one up from the URL kwargs.
    if isinstance(obj, Node):
        node = obj
    else:
        context = request.parser_context['kwargs']
        node = AbstractNode.load(context[view.node_lookup_url_kwarg])
    # Retracted registrations are never accessible through this view.
    return not node.is_retracted
def get_node_title(self, obj):
    """Return the node's title if the requesting user may see it,
    otherwise the generic 'Private Component' placeholder."""
    user = self.context['request'].user
    node_title = obj['node']['title']
    node = AbstractNode.load(obj['node']['_id'])
    if user.is_authenticated:
        # Authenticated users need read permission on the node.
        if node.has_permission(user, osf_permissions.READ):
            return node_title
    elif node.is_public:
        # Anonymous users may only see public node titles.
        return node_title
    return 'Private Component'
def main(dry=True):
    """Read a JSON file (path given as argv[1]) mapping node guids to lists of
    system tags, and add each tag to the corresponding node.

    :param bool dry: when True, log what would happen without saving
    """
    systagfile = sys.argv[1]
    with open(systagfile, 'r') as fp:
        systag_data = json.load(fp)
    # BUG FIX: dict.iteritems() is Python-2-only; use items() instead.
    for node_id, systags in systag_data.items():
        node = AbstractNode.load(node_id)
        for systag in systags:
            logger.info('Adding {} as a system tag to AbstractNode {}'.format(systag, node._id))
            if not dry:
                node.add_system_tag(systag, save=True)
def search_contributor(auth):
    """Search for contributors, optionally excluding those already on a node."""
    user = auth.user if auth else None
    nid = request.args.get('excludeNode')
    exclude = AbstractNode.load(nid).contributors if nid else []
    # TODO: Determine whether bleach is appropriate for ES payload. Also,
    # inconsistent with website.sanitize.util.strip_html
    query = bleach.clean(request.args.get('query', ''), tags=[], strip=True)
    page = int(bleach.clean(request.args.get('page', '0'), tags=[], strip=True))
    size = int(bleach.clean(request.args.get('size', '5'), tags=[], strip=True))
    return search.search_contributor(
        query=query,
        page=page,
        size=size,
        exclude=exclude,
        current_user=user,
    )
def load_parent(parent_id):
    """Return a summary dict for a public parent node, or None."""
    parent = AbstractNode.load(parent_id)
    if not (parent and parent.is_public):
        return None
    # NOTE(review): 'is_registation' is misspelled but is a runtime dict key —
    # consumers may rely on it, so it is preserved as-is; confirm before renaming.
    return {
        'title': parent.title,
        'url': parent.url,
        'id': parent._id,
        'is_registation': parent.is_registration,
    }
def has_object_permission(self, request, view, obj):
    # Preprints cannot be registrations
    if isinstance(obj, Preprint):
        return True
    # Fall back to the node identified in the URL when obj isn't a node.
    if not isinstance(obj, AbstractNode):
        obj = AbstractNode.load(request.parser_context['kwargs'][view.node_lookup_url_kwarg])
    assert_resource_type(obj, self.acceptable_models)
    # Registrations are read-only here.
    if obj.is_registration:
        return request.method in permissions.SAFE_METHODS
    return True
def has_object_permission(self, request, view, obj):
    assert isinstance(obj, (AbstractNode, OSFUser, Contributor)), 'obj must be User, Contributor, or Node, got {}'.format(obj)
    auth = get_user_auth(request)
    context = request.parser_context['kwargs']
    node = AbstractNode.load(context[view.node_lookup_url_kwarg])
    user = OSFUser.load(context['user_id'])

    if request.method in permissions.SAFE_METHODS:
        # Reads: public node or explicit view access.
        return node.is_public or node.can_view(auth)
    if request.method == 'DELETE':
        # A contributor may remove themselves; otherwise admin is required.
        return node.has_permission(auth.user, osf_permissions.ADMIN) or auth.user == user
    # All other writes require admin.
    return node.has_permission(auth.user, osf_permissions.ADMIN)
def get_paginated_response(self, data):
    """ Add number of bibliographic contributors to links.meta"""
    response = super(NodeContributorPagination, self).get_paginated_response(data)
    response_dict = response.data
    kwargs = self.request.parser_context['kwargs'].copy()
    node = Node.load(kwargs.get('node_id', None))
    total_bibliographic = node.visible_contributors.count()
    # API versions before 2.1 nested meta under links.
    if self.request.version < '2.1':
        response_dict['links']['meta']['total_bibliographic'] = total_bibliographic
    else:
        response_dict['meta']['total_bibliographic'] = total_bibliographic
    return Response(response_dict)
def clone_wiki(self, node_id):
    """Clone a node wiki page.

    :param str node_id: guid of the Node that owns the cloned wiki page
    :return: The cloned wiki page
    :raises ValueError: if the node cannot be loaded
    """
    node = AbstractNode.load(node_id)
    if not node:
        raise ValueError('Invalid node')
    clone = self.clone()
    clone.node = node
    clone.user = self.user
    clone.save()
    return clone
def has_object_permission(self, request, view, obj):
    node_link = NodeRelation.load(request.parser_context['kwargs']['node_link_id'])
    node = AbstractNode.load(request.parser_context['kwargs'][view.node_lookup_url_kwarg])
    auth = get_user_auth(request)

    # Node links on registrations cannot be deleted at all.
    if request.method == 'DELETE' and node.is_registration:
        raise exceptions.MethodNotAllowed(method=request.method)
    # Collections and registrations don't expose node links here.
    if node.is_collection or node.is_registration:
        raise exceptions.NotFound
    # The link must actually belong to the node in the URL.
    if node != node_link.parent:
        raise exceptions.NotFound
    if request.method == 'DELETE' and not node.can_edit(auth):
        return False
    return True
def has_object_permission(self, request, view, obj):
    assert isinstance(obj, (AbstractNode, NodeRelation)), 'obj must be an Node or NodeRelation, got {}'.format(obj)
    auth = get_user_auth(request)
    parent_node = AbstractNode.load(request.parser_context['kwargs']['node_id'])
    pointer_node = NodeRelation.load(request.parser_context['kwargs']['node_link_id']).child

    if request.method in permissions.SAFE_METHODS:
        # Readable when the pointed-to node is public, or the user can view
        # both the parent and the pointed-to node.
        if pointer_node.is_public:
            return True
        return parent_node.can_view(auth) and pointer_node.can_view(auth)
    # Writes require edit access on the parent.
    return parent_node.can_edit(auth)
def project_remove_contributor(auth, **kwargs):
    """Remove a contributor from a list of nodes.

    :param Auth auth: Consolidated authorization
    :raises: HTTPError(400) if contributors to be removed are not in list
        or if no admin users would remain after changes were applied
    """
    contributor_id = request.get_json()['contributorID']
    node_ids = request.get_json()['nodeIDs']
    contributor = OSFUser.load(contributor_id)
    if contributor is None:
        raise HTTPError(http.BAD_REQUEST, data={'message_long': 'Contributor not found.'})

    redirect_url = {}
    parent_id = node_ids[0]
    for node_id in node_ids:
        # Update permissions and order
        node = AbstractNode.load(node_id)

        # Forbidden unless user is removing herself
        if not node.has_permission(auth.user, 'admin'):
            if auth.user != contributor:
                raise HTTPError(http.FORBIDDEN)

        # Never drop the last bibliographic contributor.
        if node.visible_contributors.count() == 1 \
                and node.visible_contributors[0] == contributor:
            raise HTTPError(http.FORBIDDEN, data={
                'message_long': 'Must have at least one bibliographic contributor'
            })

        nodes_removed = node.remove_contributor(contributor, auth=auth)
        # remove_contributor returns false if there is not one admin or
        # visible contributor left after the move.
        if not nodes_removed:
            raise HTTPError(http.BAD_REQUEST, data={
                'message_long': 'Could not remove contributor.'})

        # On parent node, if user has removed herself from project, alert;
        # redirect to node summary if node is public, else to user's
        # dashboard page
        if not node.is_contributor(auth.user) and node_id == parent_id:
            status.push_status_message(
                'You have removed yourself as a contributor from this project',
                kind='success',
                trust=False,
                id='remove_self_contrib'
            )
            if node.is_public:
                redirect_url = {'redirectUrl': node.url}
            else:
                redirect_url = {'redirectUrl': web_url_for('dashboard')}

    return redirect_url
def get_nodes_to_add_remove(self, nodes, new_nodes):
    """Diff the current node relationship against the requested one.

    :return: (list of nodes to add, iterable of nodes to remove)
    :raises NotFound: if an id to add does not resolve to a node
    """
    diff = relationship_diff(
        current_items={node._id: node for node in nodes},
        new_items={node['_id']: node for node in new_nodes}
    )

    additions = []
    for node_id in diff['add']:
        loaded = AbstractNode.load(node_id)
        if not loaded:
            raise NotFound
        additions.append(loaded)

    return additions, diff['remove'].values()
def remove_pointer(auth, node, **kwargs):
    """Remove a pointer from a node, raising a 400 if the pointer is not
    in `node.nodes`.
    """
    # TODO: since these a delete request, shouldn't use request body. put
    # pointer id in the URL instead
    pointer_id = request.json.get('pointerId')
    if pointer_id is None:
        raise HTTPError(http.BAD_REQUEST)

    pointer = AbstractNode.load(pointer_id)
    if pointer is None:
        raise HTTPError(http.BAD_REQUEST)

    try:
        node.rm_pointer(pointer, auth=auth)
    except ValueError:
        raise HTTPError(http.BAD_REQUEST)

    node.save()
def archive_success(dst_pk, job_pk):
    """Archiver's final callback. For the time being the use case for this task
    is to rewrite references to files selected in a registration schema (the
    Prereg Challenge being the first to expose this feature). The created
    references point to files on the registered_from Node (needed for
    previewing schema data), and must be re-associated with the corresponding
    files in the newly created registration.

    :param str dst_pk: primary key of registration Node

    note:: At first glance this task makes redundant calls to
    utils.get_file_map (which returns a generator yielding
    (<sha256>, <file_metadata>) pairs) on the dst Node. Two notes about
    utils.get_file_map: 1) this function memoizes previous results to reduce
    overhead and 2) this function returns a generator that lazily fetches the
    file metadata of child Nodes (it is possible for a selected file to belong
    to a child Node) using a non-recursive DFS. Combined this allows for a
    relatively efficient implementation with seemingly redundant calls.
    """
    create_app_context()
    dst = AbstractNode.load(dst_pk)

    # Cache registration files count
    dst.update_files_count()

    # The filePicker extension added with the Prereg Challenge registration
    # schema lets users select OSFStorage files as answers to schema questions.
    # Those selections reference files on the unregistered Node, so the paths
    # must be migrated after archival; sha256 hashes identify files post-archive.
    for schema in dst.registered_schema.all():
        if schema.has_files:
            utils.migrate_file_metadata(dst, schema)

    job = ArchiveJob.load(job_pk)
    if not job.sent:
        job.sent = True
        job.save()
        # Kick off the sanction (approval) emails to all active contributors.
        dst.sanction.ask(
            dst.get_active_contributors_recursive(unique_users=True))
        if settings.SHARE_ENABLED:
            update_share(dst)
def test_bulk_creates_children_child_logged_in_write_contributor(self, app, user, project, child_one, child_two, url):
    # Write contributors are allowed to bulk-create child nodes.
    write_contrib = AuthUserFactory()
    project.add_contributor(
        write_contrib,
        permissions=[permissions.READ, permissions.WRITE],
        auth=Auth(user),
        save=True,
    )
    res = app.post_json_api(
        url, {'data': [child_one, child_two]}, auth=write_contrib.auth, bulk=True)
    assert res.status_code == 201

    # The response echoes the submitted attributes in submission order.
    for idx, expected in enumerate((child_one, child_two)):
        for attr in ('title', 'description', 'category'):
            assert res.json['data'][idx]['attributes'][attr] == expected['attributes'][attr]

    project.reload()
    created_ids = [res.json['data'][0]['id'], res.json['data'][1]['id']]
    children = project.nodes
    assert created_ids[0] == children[0]._id
    assert created_ids[1] == children[1]._id
    assert AbstractNode.load(created_ids[0]).logs.latest().action == NodeLog.PROJECT_CREATED
    assert children[1].logs.latest().action == NodeLog.PROJECT_CREATED
def test_creates_child_logged_in_write_contributor(self, app, user, project, child, url):
    # A single write contributor can create one child node.
    write_contrib = AuthUserFactory()
    project.add_contributor(
        write_contrib, permissions=permissions.WRITE, auth=Auth(user), save=True)

    res = app.post_json_api(url, child, auth=write_contrib.auth)
    assert res.status_code == 201

    returned = res.json['data']['attributes']
    submitted = child['data']['attributes']
    assert returned['title'] == submitted['title']
    assert returned['description'] == submitted['description']
    assert returned['category'] == submitted['category']

    project.reload()
    child_id = res.json['data']['id']
    assert child_id == project.nodes[0]._id
    assert AbstractNode.load(child_id).logs.latest().action == NodeLog.PROJECT_CREATED
def project_generate_private_link_post(auth, node, **kwargs):
    """Create a new private link object and add it to the node and its
    selected children."""
    node_ids = request.json.get('node_ids', [])
    name = request.json.get('name', '')
    anonymous = request.json.get('anonymous', False)

    # The current node always participates in the link, listed first.
    if node._id not in node_ids:
        node_ids.insert(0, node._id)

    nodes = [AbstractNode.load(node_id) for node_id in node_ids]
    try:
        new_link = new_private_link(
            name=name, user=auth.user, nodes=nodes, anonymous=anonymous)
    except ValidationError as e:
        raise HTTPError(http.BAD_REQUEST, data=dict(message_long=e.message))

    return new_link
def project_new_post(auth, **kwargs):
    """Create a new project, optionally from a template node."""
    user = auth.user
    data = request.get_json()

    title = strip_html(data.get('title'))
    title = title.strip()
    category = data.get('category', 'project')
    template = data.get('template')
    description = strip_html(data.get('description'))
    new_project = {}

    if template:
        # Clone an existing node, overriding title/category/description.
        original_node = AbstractNode.load(template)
        changes = {
            'title': title,
            'category': category,
            'template_node': original_node,
        }
        if description:
            changes['description'] = description
        project = original_node.use_as_template(auth=auth, changes={template: changes})
    else:
        try:
            project = new_node(category, title, user, description)
        except ValidationError as e:
            raise HTTPError(http.BAD_REQUEST, data=dict(message_long=e.message))
        new_project = _view_project(project, auth)

    return {
        'projectUrl': project.url,
        'newNode': new_project['node'] if new_project else None
    }, http.CREATED
def create(self, validated_data):
    """Affiliate the posted nodes with the institution in context.

    :raises NotFound: if any posted id does not resolve to a node
    :raises PermissionDenied: if the user lacks write access to a node
    :raises RelationshipPostMakesNoChanges: if every node was already affiliated
    """
    inst = self.context['view'].get_object()['self']
    user = self.context['request'].user
    node_dicts = validated_data['data']

    made_changes = False
    for node_dict in node_dicts:
        node = AbstractNode.load(node_dict['_id'])
        if not node:
            raise exceptions.NotFound(detail='AbstractNode with id "{}" was not found'.format(node_dict['_id']))
        if not node.has_permission(user, osf_permissions.WRITE):
            raise exceptions.PermissionDenied(detail='Write permission on node {} required'.format(node_dict['_id']))
        if not node.is_affiliated_with_institution(inst):
            node.add_affiliated_institution(inst, user, save=True)
            made_changes = True

    if not made_changes:
        raise RelationshipPostMakesNoChanges

    return {
        'data': list(inst.nodes.filter(is_deleted=False, type='osf.node')),
        'self': inst,
    }
def _add_related_claimed_tag_to_user(pid, user):
    """
    Adds claimed tag to incoming users, depending on whether the resource has
    related source tags.

    :param pid: guid of either the node or the preprint
    :param user: the claiming user
    """
    node = AbstractNode.load(pid)
    preprint = Preprint.load(pid)
    osf_claimed_tag, _ = Tag.all_tags.get_or_create(name=provider_claimed_tag('osf'), system=True)

    if node:
        source_tags = node.all_tags.filter(name__icontains='source:', system=True)
        if source_tags.exists():
            # Map each source tag to its corresponding claimed tag.
            for tag in source_tags:
                claimed_tag, _ = Tag.all_tags.get_or_create(
                    name=NODE_SOURCE_TAG_CLAIMED_TAG_RELATION[tag.name], system=True)
                user.add_system_tag(claimed_tag)
        else:
            # No source tags: fall back to the generic OSF claimed tag.
            user.add_system_tag(osf_claimed_tag)
    elif preprint:
        provider_id = preprint.provider._id
        preprint_claimed_tag, _ = Tag.all_tags.get_or_create(
            name=provider_claimed_tag(provider_id, 'preprint'), system=True)
        user.add_system_tag(preprint_claimed_tag)
def create_waterbutler_log(payload, **kwargs):
    """Record a WaterButler file action as a node log entry and fire the
    file_updated signal. Move/copy actions get special handling (rename
    detection, source/destination serialization, and notification emails)."""
    with transaction.atomic():
        try:
            auth = payload['auth']
            # Don't log download actions
            if payload['action'] in DOWNLOAD_ACTIONS:
                guid = Guid.load(payload['metadata'].get('nid'))
                if guid:
                    node = guid.referent
                return {'status': 'success'}

            user = OSFUser.load(auth['id'])
            if user is None:
                raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
            action = LOG_ACTION_MAP[payload['action']]
        except KeyError:
            # Missing auth/action keys or unknown action name.
            raise HTTPError(http_status.HTTP_400_BAD_REQUEST)

        auth = Auth(user=user)
        node = kwargs.get('node') or kwargs.get('project') or Preprint.load(
            kwargs.get('nid')) or Preprint.load(kwargs.get('pid'))

        if action in (NodeLog.FILE_MOVED, NodeLog.FILE_COPIED):
            # Both bundles must carry the full set of descriptor keys.
            for bundle in ('source', 'destination'):
                for key in ('provider', 'materialized', 'name', 'nid'):
                    if key not in payload[bundle]:
                        raise HTTPError(http_status.HTTP_400_BAD_REQUEST)

            dest = payload['destination']
            src = payload['source']

            if src is not None and dest is not None:
                dest_path = dest['materialized']
                src_path = src['materialized']
                if dest_path.endswith('/') and src_path.endswith('/'):
                    dest_path = os.path.dirname(dest_path)
                    src_path = os.path.dirname(src_path)
                # A "move" within the same folder/provider/node that only
                # changes the name is really a rename.
                if (os.path.split(dest_path)[0] == os.path.split(src_path)[0] and
                        dest['provider'] == src['provider'] and
                        dest['nid'] == src['nid'] and
                        dest['name'] != src['name']):
                    action = LOG_ACTION_MAP['rename']

            destination_node = node  # For clarity
            source_node = AbstractNode.load(src['nid']) or Preprint.load(src['nid'])

            # We return provider fullname so we need to load node settings,
            # if applicable
            source = None
            if hasattr(source_node, 'get_addon'):
                source = source_node.get_addon(payload['source']['provider'])
            destination = None
            if hasattr(node, 'get_addon'):
                destination = node.get_addon(payload['destination']['provider'])

            payload['source'].update({
                'materialized': payload['source']['materialized'].lstrip('/'),
                'addon': source.config.full_name if source else 'osfstorage',
                'url': source_node.web_url_for(
                    'addon_view_or_download_file',
                    path=payload['source']['path'].lstrip('/'),
                    provider=payload['source']['provider']),
                'node': {
                    '_id': source_node._id,
                    'url': source_node.url,
                    'title': source_node.title,
                }
            })

            payload['destination'].update({
                'materialized': payload['destination']['materialized'].lstrip('/'),
                'addon': destination.config.full_name if destination else 'osfstorage',
                'url': destination_node.web_url_for(
                    'addon_view_or_download_file',
                    path=payload['destination']['path'].lstrip('/'),
                    provider=payload['destination']['provider']),
                'node': {
                    '_id': destination_node._id,
                    'url': destination_node.url,
                    'title': destination_node.title,
                }
            })

            if not payload.get('errors'):
                destination_node.add_log(action=action, auth=auth, params=payload)

            if payload.get('email') is True or payload.get('errors'):
                mails.send_mail(
                    user.username,
                    mails.FILE_OPERATION_FAILED if payload.get('errors')
                    else mails.FILE_OPERATION_SUCCESS,
                    action=payload['action'],
                    source_node=source_node,
                    destination_node=destination_node,
                    source_path=payload['source']['materialized'],
                    source_addon=payload['source']['addon'],
                    destination_addon=payload['destination']['addon'],
                    osf_support_email=settings.OSF_SUPPORT_EMAIL)

            if payload.get('errors'):
                # Action failed but our function succeeded
                # Bail out to avoid file_signals
                return {'status': 'success'}
        else:
            node.create_waterbutler_log(auth, action, payload)

        metadata = payload.get('metadata') or payload.get('destination')
        target_node = AbstractNode.load(metadata.get('nid'))
        if target_node and not target_node.is_quickfiles and payload[
                'action'] != 'download_file':
            update_storage_usage_with_size(payload)

    with transaction.atomic():
        file_signals.file_updated.send(
            target=node, user=user, event_type=action, payload=payload)

    return {'status': 'success'}
def get_auth(auth, **kwargs):
    """Authenticate a WaterButler request and return provider credentials.

    Authentication is attempted in order: existing session user, CAS OAuth
    Bearer token, then the OSF cookie inside the JWE/JWT-encoded ``payload``
    query parameter.  After access checks, credentials and settings for the
    requested storage provider are resolved (per-file-version region for
    osfstorage) and returned as an encrypted JWT payload.  For osfstorage
    reads by non-contributors, analytics and (optionally) elasticsearch
    metrics are recorded as a side effect.

    :param Auth auth: the (possibly anonymous) auth context for the request
    :return: ``{'payload': <JWE-encrypted JWT>}`` containing auth, credentials,
        settings and the WaterButler callback URL
    :raises HTTPError: 403 on an invalid/expired payload token, 400 on missing
        payload keys or unknown provider, 404/410 for missing/deleted targets
    """
    cas_resp = None
    if not auth.user:
        # Central Authentication Server OAuth Bearer Token
        authorization = request.headers.get('Authorization')
        if authorization and authorization.startswith('Bearer '):
            client = cas.get_client()
            try:
                access_token = cas.parse_auth_header(authorization)
                cas_resp = client.profile(access_token)
            except cas.CasError as err:
                sentry.log_exception()
                # NOTE: We assume that the request is an AJAX request
                return json_renderer(err)
            if cas_resp.authenticated:
                auth.user = OSFUser.load(cas_resp.user)

    try:
        # Payload is JWE-encrypted around a signed JWT; require an exp claim
        data = jwt.decode(
            jwe.decrypt(request.args.get('payload', '').encode('utf-8'), WATERBUTLER_JWE_KEY),
            settings.WATERBUTLER_JWT_SECRET,
            options={'require_exp': True},
            algorithm=settings.WATERBUTLER_JWT_ALGORITHM
        )['data']
    except (jwt.InvalidTokenError, KeyError) as err:
        sentry.log_message(str(err))
        raise HTTPError(http_status.HTTP_403_FORBIDDEN)

    if not auth.user:
        # Fall back to the OSF session cookie forwarded by WaterButler
        auth.user = OSFUser.from_cookie(data.get('cookie', ''))

    try:
        action = data['action']
        node_id = data['nid']
        provider_name = data['provider']
    except KeyError:
        raise HTTPError(http_status.HTTP_400_BAD_REQUEST)

    # Target may be a node or a preprint
    node = AbstractNode.load(node_id) or Preprint.load(node_id)
    if node and node.is_deleted:
        raise HTTPError(http_status.HTTP_410_GONE)
    elif not node:
        raise HTTPError(http_status.HTTP_404_NOT_FOUND)

    check_access(node, auth, action, cas_resp)

    # Preprints have no addons; provider settings only apply to nodes
    provider_settings = None
    if hasattr(node, 'get_addon'):
        provider_settings = node.get_addon(provider_name)
        if not provider_settings:
            raise HTTPError(http_status.HTTP_400_BAD_REQUEST)

    path = data.get('path')
    credentials = None
    waterbutler_settings = None
    fileversion = None
    if provider_name == 'osfstorage':
        if path:
            file_id = path.strip('/')
            # check to see if this is a file or a folder
            filenode = OsfStorageFileNode.load(path.strip('/'))
            if filenode and filenode.is_file:
                # default to most recent version if none is provided in the response
                version = int(data['version']) if data.get('version') else filenode.versions.count()
                try:
                    fileversion = FileVersion.objects.filter(
                        basefilenode___id=file_id,
                        identifier=version
                    ).select_related('region').get()
                except FileVersion.DoesNotExist:
                    raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
                if auth.user:
                    # mark fileversion as seen
                    FileVersionUserMetadata.objects.get_or_create(user=auth.user, file_version=fileversion)
                # Only count views/downloads from non-contributors
                if not node.is_contributor_or_group_member(auth.user):
                    from_mfr = download_is_from_mfr(request, payload=data)
                    # version index is 0 based
                    version_index = version - 1
                    if action == 'render':
                        update_analytics(node, filenode, version_index, 'view')
                    elif action == 'download' and not from_mfr:
                        update_analytics(node, filenode, version_index, 'download')
                    if waffle.switch_is_active(features.ELASTICSEARCH_METRICS):
                        if isinstance(node, Preprint):
                            metric_class = get_metric_class_for_action(action, from_mfr=from_mfr)
                            if metric_class:
                                sloan_flags = {'sloan_id': request.cookies.get(SLOAN_ID_COOKIE_NAME)}
                                for flag_name in SLOAN_FLAGS:
                                    # Custom-domain cookie takes precedence over the plain one
                                    value = request.cookies.get(f'dwf_{flag_name}_custom_domain') or request.cookies.get(f'dwf_{flag_name}')
                                    if value:
                                        sloan_flags[flag_name.replace('_display', '')] = strtobool(value)
                                try:
                                    metric_class.record_for_preprint(
                                        preprint=node,
                                        user=auth.user,
                                        version=fileversion.identifier if fileversion else None,
                                        path=path,
                                        **sloan_flags
                                    )
                                except es_exceptions.ConnectionError:
                                    # Metrics are best-effort; never fail the request
                                    log_exception()

    if fileversion and provider_settings:
        # Version-specific region decides where the bytes actually live
        region = fileversion.region
        credentials = region.waterbutler_credentials
        waterbutler_settings = fileversion.serialize_waterbutler_settings(
            node_id=provider_settings.owner._id,
            root_id=provider_settings.root_node._id,
        )
    # If they haven't been set by version region, use the NodeSettings or Preprint directly
    if not (credentials and waterbutler_settings):
        credentials = node.serialize_waterbutler_credentials(provider_name)
        waterbutler_settings = node.serialize_waterbutler_settings(provider_name)

    if isinstance(credentials.get('token'), bytes):
        credentials['token'] = credentials.get('token').decode()

    return {'payload': jwe.encrypt(jwt.encode({
        'exp': timezone.now() + datetime.timedelta(seconds=settings.WATERBUTLER_JWT_EXPIRATION),
        'data': {
            'auth': make_auth(auth.user),  # A waterbutler auth dict not an Auth object
            'credentials': credentials,
            'settings': waterbutler_settings,
            'callback_url': node.api_url_for(
                ('create_waterbutler_log' if not getattr(node, 'is_registration', False) else 'registration_callbacks'),
                _absolute=True,
                _internal=True
            )
        }
    }, settings.WATERBUTLER_JWT_SECRET, algorithm=settings.WATERBUTLER_JWT_ALGORITHM), WATERBUTLER_JWE_KEY).decode()}
def has_object_permission(self, request, view, obj):
    """Deny object access when the node addressed by the URL is retracted."""
    url_kwargs = request.parser_context['kwargs']
    node = AbstractNode.load(url_kwargs[view.node_lookup_url_kwarg])
    return not node.is_retracted
def get_resource(self, kwargs):
    """Return the AbstractNode for the ``node_id`` URL kwarg, or None."""
    return AbstractNode.load(kwargs.get('node_id'))
def load_resource(self, context, view):
    """Fetch the node referenced by the view's node-lookup URL kwarg."""
    node_id = context[view.node_lookup_url_kwarg]
    return AbstractNode.load(node_id)
def get_auth(auth, **kwargs):
    """Authenticate a WaterButler request and return addon credentials.

    Authentication is attempted in order: existing session user, CAS OAuth
    Bearer token, then the OSF cookie embedded in the JWE/JWT-encoded
    ``payload`` query parameter.  After access checks, the node's addon
    settings provide the credentials returned inside an encrypted JWT.

    :param Auth auth: the (possibly anonymous) auth context for the request
    :return: ``{'payload': <JWE-encrypted JWT>}`` with auth, credentials,
        settings and the WaterButler callback URL
    :raises HTTPError: FORBIDDEN for an invalid/expired payload token,
        BAD_REQUEST on missing keys, unknown addon, or addon serialization
        failure, NOT_FOUND when the node does not exist
    """
    cas_resp = None
    if not auth.user:
        # Central Authentication Server OAuth Bearer Token
        authorization = request.headers.get('Authorization')
        if authorization and authorization.startswith('Bearer '):
            client = cas.get_client()
            try:
                access_token = cas.parse_auth_header(authorization)
                cas_resp = client.profile(access_token)
            except cas.CasError as err:
                sentry.log_exception()
                # NOTE: We assume that the request is an AJAX request
                return json_renderer(err)
            if cas_resp.authenticated:
                auth.user = OSFUser.load(cas_resp.user)

    try:
        # Payload is JWE-encrypted around a signed JWT; require an exp claim
        data = jwt.decode(
            jwe.decrypt(request.args.get('payload', '').encode('utf-8'), WATERBUTLER_JWE_KEY),
            settings.WATERBUTLER_JWT_SECRET,
            options={'require_exp': True},
            algorithm=settings.WATERBUTLER_JWT_ALGORITHM
        )['data']
    except (jwt.InvalidTokenError, KeyError) as err:
        sentry.log_message(str(err))
        raise HTTPError(httplib.FORBIDDEN)

    if not auth.user:
        # Fall back to the OSF session cookie forwarded by WaterButler
        auth.user = OSFUser.from_cookie(data.get('cookie', ''))

    try:
        action = data['action']
        node_id = data['nid']
        provider_name = data['provider']
    except KeyError:
        raise HTTPError(httplib.BAD_REQUEST)

    node = AbstractNode.load(node_id)
    if not node:
        raise HTTPError(httplib.NOT_FOUND)

    check_access(node, auth, action, cas_resp)

    provider_settings = node.get_addon(provider_name)
    if not provider_settings:
        raise HTTPError(httplib.BAD_REQUEST)

    try:
        credentials = provider_settings.serialize_waterbutler_credentials()
        waterbutler_settings = provider_settings.serialize_waterbutler_settings()
    except exceptions.AddonError:
        log_exception()
        raise HTTPError(httplib.BAD_REQUEST)

    return {'payload': jwe.encrypt(jwt.encode({
        'exp': timezone.now() + datetime.timedelta(seconds=settings.WATERBUTLER_JWT_EXPIRATION),
        'data': {
            'auth': make_auth(auth.user),  # A waterbutler auth dict not an Auth object
            'credentials': credentials,
            'settings': waterbutler_settings,
            'callback_url': node.api_url_for(
                ('create_waterbutler_log' if not node.is_registration else 'registration_callbacks'),
                _absolute=True,
                _internal=True
            ),
        }
    }, settings.WATERBUTLER_JWT_SECRET, algorithm=settings.WATERBUTLER_JWT_ALGORITHM), WATERBUTLER_JWE_KEY)}
def configure_subscription(auth):
    """Create, update, or remove a notification subscription for a user.

    The JSON body supplies the subscription target ``id`` (a node guid or the
    current user's own id), the ``event`` name, and a ``notification_type``
    (one of NOTIFICATION_TYPES, or ``'adopt_parent'`` to defer to the parent
    node's settings, which removes the explicit subscription).

    :param Auth auth: auth context of the acting user
    :return: a message dict on successful subscription, or ``{}`` when an
        adopt_parent request results in (or requires) no explicit subscription
    :raises HTTPError: BAD_REQUEST for missing/invalid event or type, for
        registrations, or adopt_parent on a parentless target; FORBIDDEN for
        nodes the user cannot read; NOT_FOUND for unknown target ids
    """
    user = auth.user
    json_data = request.get_json()
    target_id = json_data.get('id')
    event = json_data.get('event')
    notification_type = json_data.get('notification_type')
    path = json_data.get('path')
    provider = json_data.get('provider')

    if not event or (notification_type not in NOTIFICATION_TYPES and notification_type != 'adopt_parent'):
        raise HTTPError(http.BAD_REQUEST, data=dict(
            message_long='Must provide an event and notification type for subscription.')
        )

    node = AbstractNode.load(target_id)
    # File-specific subscriptions are keyed on the waterbutler path
    if 'file_updated' in event and path is not None and provider is not None:
        wb_path = path.lstrip('/')
        event = wb_path + '_file_updated'
    event_id = utils.to_subscription_key(target_id, event)

    if not node:
        # if target_id is not a node it currently must be the current user
        if not target_id == user._id:
            sentry.log_message('{!r} attempted to subscribe to either a bad '
                               'id or non-node non-self id, {}'.format(user, target_id))
            raise HTTPError(http.NOT_FOUND)

        if notification_type == 'adopt_parent':
            sentry.log_message('{!r} attempted to adopt_parent of a none node id, {}'.format(user, target_id))
            raise HTTPError(http.BAD_REQUEST)
        owner = user
    else:
        if not node.has_permission(user, READ):
            sentry.log_message('{!r} attempted to subscribe to private node, {}'.format(user, target_id))
            raise HTTPError(http.FORBIDDEN)

        if isinstance(node, Registration):
            sentry.log_message('{!r} attempted to subscribe to registration, {}'.format(user, target_id))
            raise HTTPError(http.BAD_REQUEST)

        if notification_type != 'adopt_parent':
            owner = node
        else:
            # File subscriptions can always adopt; node subscriptions need a parent
            if 'file_updated' in event and len(event) > len('file_updated'):
                pass
            else:
                parent = node.parent_node
                if not parent:
                    sentry.log_message('{!r} attempted to adopt_parent of '
                                       'the parentless project, {!r}'.format(user, node))
                    raise HTTPError(http.BAD_REQUEST)

            # If adopt_parent make sure that this subscription is None for the current User
            subscription = NotificationSubscription.load(event_id)
            if not subscription:
                return {}  # We're done here

            subscription.remove_user_from_subscription(user)
            return {}

    subscription = NotificationSubscription.load(event_id)

    if not subscription:
        subscription = NotificationSubscription(_id=event_id, owner=owner, event_name=event)
        subscription.save()

    # Remember that this node has explicit notification settings for the user
    if node and node._id not in user.notifications_configured:
        user.notifications_configured[node._id] = True
        user.save()

    subscription.add_user_to_subscription(user, notification_type)

    subscription.save()

    return {'message': 'Successfully subscribed to {} list on {}'.format(notification_type, event_id)}
def main_task(osf_cookie, data, request_info):
    '''
    Creates a temporary folder to download the files from the FTP server,
    downloads from it, uploads to the selected storage and deletes the
    temporary files.

    :param str osf_cookie: OSF session cookie used for the upload requests
    :param dict data: request parameters — ``protocol`` (selects the download
        function), ``destPid``, and optional ``destFolderId``
    :param dict request_info: context — ``uid``, ``node_id`` and ``pid`` used
        for the temp folder name and activity logging
    :return: True (also after a SoftTimeLimitExceeded cleanup, which is
        swallowed rather than re-raised)
    :raises OSError: when the temporary folder cannot be created
    :raises RuntimeError: when the download or any upload fails
    '''
    try:
        tmp_path = create_tmp_folder(request_info['uid'])
        if not tmp_path:
            raise OSError('Could not create temporary folder.')

        try:
            # Protocol-specific download (e.g. FTP/FTPS) into the temp folder
            downloaded = DOWNLOAD_FUNCTIONS[data['protocol']](tmp_path, data)
            if not downloaded:
                raise RuntimeError('Could not download the file(s) from the FTP server.')
        except IOError:
            raise RuntimeError('Could not download the file(s) from the FTP server.')

        dest_path = 'osfstorage/' if 'destFolderId' not in data else data['destFolderId']
        uploaded = waterbutler.upload_folder_recursive(osf_cookie, data['destPid'], tmp_path, dest_path)
        shutil.rmtree(tmp_path)

        # Variables for logging into recent activity
        node = AbstractNode.load(request_info['node_id'])
        user = OSFUser.load(request_info['uid'])
        auth = Auth(user=user)

        if uploaded['fail_file'] > 0 or uploaded['fail_folder'] > 0:
            # Recent activity log
            node.add_log(
                action='ftp_upload_fail',
                params={
                    'node': request_info['node_id'],
                    'project': request_info['pid'],
                    'filecount': uploaded['fail_file'],
                    'foldercount': uploaded['fail_folder']
                },
                auth=auth
            )
            # Exception
            fails = []
            if uploaded['fail_file'] > 0:
                fails.append('%s file(s)' % uploaded['fail_file'])
            if uploaded['fail_folder'] > 0:
                fails.append('%s folder(s)' % uploaded['fail_folder'])
            message = 'Failed to upload %s to storage.' % (
                ' and '.join(fails)
            )
            raise RuntimeError(message)

        node.add_log(
            action='ftp_upload_success',
            params={
                'node': request_info['node_id'],
                'project': request_info['pid'],
            },
            auth=auth
        )
    except SoftTimeLimitExceeded:
        # Celery soft time limit hit: clean up but do not re-raise
        tmp_path = tmp_path if 'tmp_path' in locals() else None
        fail_cleanup(tmp_path)
    except Exception:
        # Clean up the temp folder, then propagate the failure to celery
        tmp_path = tmp_path if 'tmp_path' in locals() else None
        fail_cleanup(tmp_path)
        raise
    return True
def get_auth(auth, **kwargs):
    """Authenticate a WaterButler request and return provider credentials.

    Authentication is attempted in order: existing session user, CAS OAuth
    Bearer token, then the OSF cookie inside the JWE/JWT-encoded ``payload``
    query parameter.  For osfstorage, credentials may come from the region of
    a specific (or latest) file version; otherwise the node's or preprint's
    own serializers are used.  Preprint metrics are optionally recorded.

    :param Auth auth: the (possibly anonymous) auth context for the request
    :return: ``{'payload': <JWE-encrypted JWT>}`` containing auth, credentials,
        settings and the WaterButler callback URL
    :raises HTTPError: FORBIDDEN for an invalid/expired payload token,
        BAD_REQUEST on missing keys, unknown provider or addon errors,
        NOT_FOUND when the target does not exist
    """
    cas_resp = None
    if not auth.user:
        # Central Authentication Server OAuth Bearer Token
        authorization = request.headers.get('Authorization')
        if authorization and authorization.startswith('Bearer '):
            client = cas.get_client()
            try:
                access_token = cas.parse_auth_header(authorization)
                cas_resp = client.profile(access_token)
            except cas.CasError as err:
                sentry.log_exception()
                # NOTE: We assume that the request is an AJAX request
                return json_renderer(err)
            if cas_resp.authenticated:
                auth.user = OSFUser.load(cas_resp.user)

    try:
        # Payload is JWE-encrypted around a signed JWT; require an exp claim
        data = jwt.decode(
            jwe.decrypt(request.args.get('payload', '').encode('utf-8'),
                        WATERBUTLER_JWE_KEY),
            settings.WATERBUTLER_JWT_SECRET,
            options={'require_exp': True},
            algorithm=settings.WATERBUTLER_JWT_ALGORITHM
        )['data']
    except (jwt.InvalidTokenError, KeyError) as err:
        sentry.log_message(str(err))
        raise HTTPError(httplib.FORBIDDEN)

    if not auth.user:
        # Fall back to the OSF session cookie forwarded by WaterButler
        auth.user = OSFUser.from_cookie(data.get('cookie', ''))

    try:
        action = data['action']
        node_id = data['nid']
        provider_name = data['provider']
    except KeyError:
        raise HTTPError(httplib.BAD_REQUEST)

    # Target may be a node or a preprint
    node = AbstractNode.load(node_id) or Preprint.load(node_id)
    if not node:
        raise HTTPError(httplib.NOT_FOUND)

    check_access(node, auth, action, cas_resp)

    # Preprints have no addons; provider settings only apply to nodes
    provider_settings = None
    if hasattr(node, 'get_addon'):
        provider_settings = node.get_addon(provider_name)
        if not provider_settings:
            raise HTTPError(httplib.BAD_REQUEST)

    try:
        path = data.get('path')
        version = data.get('version')
        credentials = None
        waterbutler_settings = None
        fileversion = None
        if provider_name == 'osfstorage':
            if path and version:
                # check to see if this is a file or a folder
                filenode = OsfStorageFileNode.load(path.strip('/'))
                if filenode and filenode.is_file:
                    try:
                        fileversion = FileVersion.objects.filter(
                            basefilenode___id=path.strip('/'),
                            identifier=version
                        ).select_related('region').get()
                    except FileVersion.DoesNotExist:
                        raise HTTPError(httplib.BAD_REQUEST)
            # path and no version, use most recent version
            elif path:
                filenode = OsfStorageFileNode.load(path.strip('/'))
                if filenode and filenode.is_file:
                    fileversion = FileVersion.objects.filter(
                        basefilenode=filenode
                    ).select_related('region').order_by('-created').first()
        if fileversion:
            # Version-specific region decides where the bytes actually live
            region = fileversion.region
            credentials = region.waterbutler_credentials
            waterbutler_settings = fileversion.serialize_waterbutler_settings(
                node_id=provider_settings.owner._id if provider_settings else node._id,
                root_id=provider_settings.root_node._id if provider_settings else node.root_folder._id,
            )
        # If they haven't been set by version region, use the NodeSettings region
        if not (credentials and waterbutler_settings):
            credentials = node.serialize_waterbutler_credentials(provider_name)
            waterbutler_settings = node.serialize_waterbutler_settings(provider_name)
    except exceptions.AddonError:
        log_exception()
        raise HTTPError(httplib.BAD_REQUEST)

    # TODO: Add a signal here?
    if waffle.switch_is_active(features.ELASTICSEARCH_METRICS):
        user = auth.user
        # Only record metrics for non-contributor preprint traffic
        if isinstance(node, Preprint) and not node.is_contributor(user):
            metric_class = get_metric_class_for_action(action)
            if metric_class:
                try:
                    metric_class.record_for_preprint(
                        preprint=node,
                        user=user,
                        version=fileversion.identifier if fileversion else None,
                        path=path
                    )
                except es_exceptions.ConnectionError:
                    # Metrics are best-effort; never fail the request
                    log_exception()

    return {'payload': jwe.encrypt(jwt.encode({
        'exp': timezone.now() + datetime.timedelta(seconds=settings.WATERBUTLER_JWT_EXPIRATION),
        'data': {
            'auth': make_auth(auth.user),  # A waterbutler auth dict not an Auth object
            'credentials': credentials,
            'settings': waterbutler_settings,
            'callback_url': node.api_url_for(
                ('create_waterbutler_log' if not getattr(node, 'is_registration', False) else 'registration_callbacks'),
                _absolute=True,
                _internal=True
            )
        }
    }, settings.WATERBUTLER_JWT_SECRET, algorithm=settings.WATERBUTLER_JWT_ALGORITHM), WATERBUTLER_JWE_KEY)}
def create_waterbutler_log(payload, **kwargs):
    """Process a WaterButler callback payload and record the file action.

    Download actions are not logged and return immediately.  Move/copy actions
    normalize the ``source``/``destination`` sub-payloads, add a log to the
    destination node, and may send success/failure mail.  Other actions are
    delegated to the addon's ``create_waterbutler_log``.  A ``file_updated``
    signal is fired afterwards.

    :param dict payload: WaterButler callback payload; must contain ``auth``
        and ``action``, plus action-specific keys (``source``/``destination``
        for moves and copies, ``metadata``/``provider`` otherwise)
    :param kwargs: must supply the target via ``node`` or ``project``
    :return: ``{'status': 'success'}``
    :raises HTTPError: BAD_REQUEST when required keys are missing, the user
        cannot be loaded, or the addon is not configured
    """
    with transaction.atomic():
        try:
            auth = payload['auth']
            # Don't log download actions, but do update analytics
            if payload['action'] in DOWNLOAD_ACTIONS:
                node = AbstractNode.load(payload['metadata']['nid'])
                return {'status': 'success'}

            user = OSFUser.load(auth['id'])
            if user is None:
                raise HTTPError(httplib.BAD_REQUEST)

            action = LOG_ACTION_MAP[payload['action']]
        except KeyError:
            # Any missing payload key (auth/action/metadata) is a bad request
            raise HTTPError(httplib.BAD_REQUEST)

        auth = Auth(user=user)
        node = kwargs['node'] or kwargs['project']

        if action in (NodeLog.FILE_MOVED, NodeLog.FILE_COPIED):
            # Both bundles must fully describe the file for a move/copy log
            for bundle in ('source', 'destination'):
                for key in ('provider', 'materialized', 'name', 'nid'):
                    if key not in payload[bundle]:
                        raise HTTPError(httplib.BAD_REQUEST)

            dest = payload['destination']
            src = payload['source']

            if src is not None and dest is not None:
                dest_path = dest['materialized']
                src_path = src['materialized']
                if dest_path.endswith('/') and src_path.endswith('/'):
                    dest_path = os.path.dirname(dest_path)
                    src_path = os.path.dirname(src_path)
                # Same parent dir, provider and node but a new name: a rename
                if (os.path.split(dest_path)[0] == os.path.split(src_path)[0] and
                        dest['provider'] == src['provider'] and
                        dest['nid'] == src['nid'] and
                        dest['name'] != src['name']):
                    action = LOG_ACTION_MAP['rename']

            destination_node = node  # For clarity
            source_node = AbstractNode.load(payload['source']['nid'])

            source = source_node.get_addon(payload['source']['provider'])
            destination = node.get_addon(payload['destination']['provider'])

            payload['source'].update({
                'materialized': payload['source']['materialized'].lstrip('/'),
                'addon': source.config.full_name,
                'url': source_node.web_url_for(
                    'addon_view_or_download_file',
                    path=payload['source']['path'].lstrip('/'),
                    provider=payload['source']['provider']
                ),
                'node': {
                    '_id': source_node._id,
                    'url': source_node.url,
                    'title': source_node.title,
                }
            })

            payload['destination'].update({
                'materialized': payload['destination']['materialized'].lstrip('/'),
                'addon': destination.config.full_name,
                'url': destination_node.web_url_for(
                    'addon_view_or_download_file',
                    path=payload['destination']['path'].lstrip('/'),
                    provider=payload['destination']['provider']
                ),
                'node': {
                    '_id': destination_node._id,
                    'url': destination_node.url,
                    'title': destination_node.title,
                }
            })

            payload.update({
                'node': destination_node._id,
                'project': destination_node.parent_id,
            })

            if not payload.get('errors'):
                destination_node.add_log(action=action, auth=auth, params=payload)

            # Mail on explicit request or on failure
            if payload.get('email') is True or payload.get('errors'):
                mails.send_mail(
                    user.username,
                    mails.FILE_OPERATION_FAILED if payload.get('errors') else mails.FILE_OPERATION_SUCCESS,
                    action=payload['action'],
                    source_node=source_node,
                    destination_node=destination_node,
                    source_path=payload['source']['materialized'],
                    # NOTE(review): destination_path is taken from the SOURCE
                    # materialized path — looks like a copy/paste slip; confirm
                    # whether it should be payload['destination']['materialized']
                    destination_path=payload['source']['materialized'],
                    source_addon=payload['source']['addon'],
                    destination_addon=payload['destination']['addon'],
                    osf_support_email=settings.OSF_SUPPORT_EMAIL
                )

            if payload.get('errors'):
                # Action failed but our function succeeded
                # Bail out to avoid file_signals
                return {'status': 'success'}

        else:
            try:
                metadata = payload['metadata']
                node_addon = node.get_addon(payload['provider'])
            except KeyError:
                raise HTTPError(httplib.BAD_REQUEST)

            if node_addon is None:
                raise HTTPError(httplib.BAD_REQUEST)

            metadata['path'] = metadata['path'].lstrip('/')
            node_addon.create_waterbutler_log(auth, action, metadata)

    with transaction.atomic():
        file_signals.file_updated.send(node=node, user=user, event_type=action, payload=payload)

    return {'status': 'success'}
def get_nodes_with_oauth_grants(self, external_account):
    """Yield the non-deleted nodes that hold an OAuth grant for the account.

    :param external_account: the ExternalAccount whose ``_id`` is looked up in
        each node's grant mapping
    :return: generator of loaded node objects

    Fix: ``dict.iteritems()`` is Python 2-only and raises ``AttributeError``
    on Python 3 (this codebase uses f-strings elsewhere, so it runs on Py3);
    use ``items()``.  Membership also tests the dict directly instead of the
    redundant ``grants.keys()``.
    """
    # Generator of nodes which have grants for this external account
    for node_id, grants in self.oauth_grants.items():
        node = AbstractNode.load(node_id)
        if external_account._id in grants and not node.is_deleted:
            yield node
def __init__(self, user, node, event, payload=None):
    """Initialize a move/copy file event: resolve the source node from the
    payload and the addon handling the destination provider."""
    super(ComplexFileEvent, self).__init__(user, node, event, payload=payload)
    source_guid = self.payload['source']['node']['_id']
    self.source_node = AbstractNode.load(source_guid)
    destination_provider = self.payload['destination']['provider']
    self.addon = self.node.get_addon(destination_provider)
def main_task(osf_cookie, data, request_info):
    '''
    Creates a temporary folder to download the files from the inputted URL,
    downloads from it, uploads to the selected storage and deletes the
    temporary files.

    :param str osf_cookie: OSF session cookie used for the upload requests
    :param dict data: request parameters — ``pid`` and optional ``folderId``
        (download source parameters are consumed by ``get_files``)
    :param dict request_info: context — ``uid``, ``node_id`` and ``pid`` used
        for the temp folder name and activity logging
    :return: True (also after a SoftTimeLimitExceeded cleanup, which is
        swallowed rather than re-raised)
    :raises OSError: when the temporary folder cannot be created
    :raises RuntimeError: when the download subprocess or any upload fails
    '''
    try:
        tmp_path = create_tmp_folder(request_info['uid'])
        if not tmp_path:
            raise OSError('Could not create temporary folder.')

        download_process = get_files(tmp_path, data)
        download_process.communicate()  # Wait for the process to finish
        if download_process.poll() != 0:  # Checks the return_code
            raise RuntimeError('wget command returned a non-success code.')

        dest_path = 'osfstorage/' if 'folderId' not in data else data['folderId']
        uploaded = waterbutler.upload_folder_recursive(osf_cookie, data['pid'], tmp_path, dest_path)
        shutil.rmtree(tmp_path)

        # Variables for logging into recent activity
        node = AbstractNode.load(request_info['node_id'])
        user = OSFUser.load(request_info['uid'])
        auth = Auth(user=user)

        if uploaded['fail_file'] > 0 or uploaded['fail_folder'] > 0:
            # Recent activity log
            node.add_log(action='restfulapi_upload_fail',
                         params={
                             'node': request_info['node_id'],
                             'project': request_info['pid'],
                             'filecount': uploaded['fail_file'],
                             'foldercount': uploaded['fail_folder']
                         },
                         auth=auth)
            # Exception
            fails = []
            if uploaded['fail_file'] > 0:
                fails.append('%s file(s)' % uploaded['fail_file'])
            if uploaded['fail_folder'] > 0:
                fails.append('%s folder(s)' % uploaded['fail_folder'])
            message = 'Failed to upload %s to storage.' % (' and '.join(fails))
            raise RuntimeError(message)

        node.add_log(action='restfulapi_upload_success',
                     params={
                         'node': request_info['node_id'],
                         'project': request_info['pid'],
                     },
                     auth=auth)
    except SoftTimeLimitExceeded:
        # Celery soft time limit hit: clean up but do not re-raise
        download_process = download_process if 'download_process' in locals() else None
        tmp_path = tmp_path if 'tmp_path' in locals() else None
        fail_cleanup(download_process, tmp_path)
    except Exception:
        # Clean up the subprocess/temp folder, then propagate the failure
        download_process = download_process if 'download_process' in locals() else None
        tmp_path = tmp_path if 'tmp_path' in locals() else None
        fail_cleanup(download_process, tmp_path)
        raise
    return True