def collect_all_registrations_smart_folder(self):
    """Build the 'All my registrations' smart folder for the dashboard.

    Counts the user's top-level registered projects plus their registered
    components, then returns the rendered smart-folder entry.
    """
    from website.project.model import Node
    all_my_registrations = Node.find_for_user(
        self.auth.user,
        (
            Q('category', 'eq', 'project') &
            Q('is_deleted', 'eq', False) &
            Q('is_registration', 'eq', True) &
            Q('is_folder', 'eq', False) &
            # parent is not in the nodes list (i.e. top-level nodes only)
            Q('__backrefs.parent.node.nodes', 'eq', None)
        )
    )
    comps = Node.find_for_user(
        self.auth.user,
        (
            # components only
            Q('category', 'ne', 'project') &
            # parent is not in the nodes list
            Q('__backrefs.parent.node.nodes', 'nin', all_my_registrations.get_keys()) &
            # exclude deleted nodes
            Q('is_deleted', 'eq', False) &
            # registrations only (the query keeps is_registration == True)
            Q('is_registration', 'eq', True)
        )
    )
    children_count = all_my_registrations.count() + comps.count()
    return self.make_smart_folder(ALL_MY_REGISTRATIONS_NAME, ALL_MY_REGISTRATIONS_ID, children_count)
def setUp(self):
    """Seed four projects across three days and record the oldest day's
    statistics, giving the CSV tests known initial/latest expected rows."""
    super(TestRenderToCSVResponse, self).setUp()
    self.app = TestApp(test_app)
    Node.remove()  # start from an empty Node collection
    time_now = get_previous_midnight()
    NodeFactory(category='project', date_created=time_now)
    NodeFactory(category='project', date_created=time_now - timedelta(days=1))
    last_time = time_now - timedelta(days=2)
    NodeFactory(category='project', date_created=last_time)
    NodeFactory(category='project', date_created=last_time)
    initial_time = last_time + timedelta(seconds=1)
    get_days_statistics(initial_time)
    midtime = last_time + timedelta(days=1, seconds=1)
    self.time = time_now + timedelta(seconds=1)
    # Expected CSV when only the initial day's statistics exist.
    self.initial_static = [
        'id,users,delta_users,unregistered_users,projects,delta_projects,public_projects,'
        'delta_public_projects,registered_projects,delta_registered_projects,date\r',
        construct_query(1, initial_time),
        '']
    # Expected CSV once all three days have statistics (newest first).
    self.latest_static = [
        'id,users,delta_users,unregistered_users,projects,delta_projects,public_projects,'
        'delta_public_projects,registered_projects,delta_registered_projects,date\r',
        construct_query(3, self.time),
        construct_query(2, midtime),
        construct_query(1, initial_time),
        '']
def setUp(self):
    """Create a public project, a registration, and a plain component."""
    super(TestMetricsGetProjects, self).setUp()
    Node.remove()  # start from an empty Node collection
    self.node = NodeFactory(category='project', is_public=True)  # makes 2 nodes bc of category
    self.reg = RegistrationFactory()  # makes 2 nodes
    self.node_2 = NodeFactory()
def setUp(self):
    """Create public/private projects, a component, and a registration of
    the public project."""
    super(TestMetricsGetProjects, self).setUp()
    Node.remove()  # start from an empty Node collection
    self.public_node = ProjectFactory(is_public=True)
    self.private_node = ProjectFactory(is_public=False)
    self.node_2 = NodeFactory()  # creates parent project + node
    self.reg = RegistrationFactory(project=self.public_node)
def test_get_targets_referent_points_to_nothing(self):
    """A guid whose referent node has been removed is a migration target;
    guids with live referents are not."""
    node = NodeFactory()
    bad_guid = Guid(referent=node)
    bad_guid.save()
    # Delete the node out from under the guid so the referent dangles.
    Node.remove(Q('_id', 'eq', node._id))
    targets = list(get_targets())
    assert_in(bad_guid, targets)
    assert_not_in(self.nontarget_guid, targets)
def main(send_email=False):
    """Audit OSFStorage usage for every top-level node and its contributors,
    logging (and optionally emailing) everyone over the configured limits.

    :param bool send_email: when True, mail the offending lines; otherwise
        only log them.
    """
    logger.info('Starting Project storage audit')
    init_app(set_backends=True, routes=False)
    lines = []
    projects = {}
    # user_id -> (bytes_used, bytes_deleted)
    users = defaultdict(lambda: (0, 0))

    # Hoisted: both the count and the iteration use the same top-level query.
    top_level_query = Q('parent_node', 'eq', None)
    progress_bar = progressbar.ProgressBar(
        maxval=Node.find(top_level_query).count()).start()

    for i, node in enumerate(Node.find(top_level_query)):
        progress_bar.update(i + 1)
        if node._id in WHITE_LIST:
            continue  # Dont count whitelisted nodes against users
        projects[node._id] = get_usage(node)
        for contrib in node.contributors:
            if node.can_edit(user=contrib):
                # Adds tuples together, map(sum, zip((a, b), (c, d))) -> (a+c, b+d)
                users[contrib._id] = tuple(
                    map(sum, zip(users[contrib._id], projects[node._id]))
                )
        if i % 25 == 0:
            # Periodically clear the ODM caches to bound memory usage.
            for key in ('node', 'user', 'fileversion', 'storedfilenode'):
                Node._cache.data.get(key, {}).clear()
                Node._object_cache.data.get(key, {}).clear()
            # Collect garbage
            gc.collect()
    progress_bar.finish()

    for model, collection, limit in ((User, users, USER_LIMIT), (Node, projects, PROJECT_LIMIT)):
        for item, (used, deleted) in filter(functools.partial(limit_filter, limit), collection.items()):
            line = '{!r} has exceeded the limit {:.2f}GBs ({}b) with {:.2f}GBs ({}b) used and {:.2f}GBs ({}b) deleted.'.format(
                model.load(item), limit / GBs, limit, used / GBs, used, deleted / GBs, deleted)
            logger.info(line)
            lines.append(line)

    if lines:
        if send_email:
            logger.info('Sending email...')
            mails.send_mail('*****@*****.**', mails.EMPTY, body='\n'.join(lines),
                            subject='Script: OsfStorage usage audit')
        else:
            # Fixed: previous code called .format(len(lines)) on a
            # placeholder-free string — a no-op that obscured intent.
            logger.info('send_email is False, not sending email')
        logger.info('{} offending project(s) and user(s) found'.format(len(lines)))
    else:
        logger.info('No offending projects or users found')
def get_queryset(self):
    """Return nodes matching the request query; when the query touches a
    blacklisted field, retracted registrations are excluded."""
    query = self.get_query_from_request()
    nodes = Node.find(query)
    if not self.is_blacklisted(query):
        return nodes
    # Blacklisted filter: rebuild the queryset without retracted nodes.
    visible_ids = [each._id for each in nodes if not each.is_retracted]
    return Node.find(Q('_id', 'in', visible_ids))
def get_queryset(self):
    """Return nodes matching the request query; when the query touches a
    blacklisted field, withdrawn (retracted) registrations are excluded."""
    query = self.get_query_from_request()
    result = Node.find(query)
    if self.is_blacklisted(query):
        # Rebuild the queryset keeping only non-withdrawn nodes.
        keep = [each._id for each in result if not each.is_retracted]
        result = Node.find(Q('_id', 'in', keep))
    return result
def _rejection_url_context(self, user_id):
    """Return template context for the user's rejection link, or None when
    the user holds no rejection token."""
    state = self.approval_state.get(user_id, {})
    token = state.get('rejection_token')
    if not token:
        return None
    from website.project.model import Node
    root_registration = Node.find_one(Q('retraction', 'eq', self))
    registration = Node.load(state.get('node_id', root_registration._id))
    return {
        'node_id': registration.registered_from._id,
        'token': token,
    }
def get_metadata_files(draft):
    """Yield the OSFStorage file objects referenced by a draft registration's
    metadata, persisting a guid for any file that lacks one.

    The two metadata layouts store the uploaded-file list in different
    places (``data[q]['extra']`` vs ``data[q]['value']['uploader']['extra']``);
    the per-file handling was previously duplicated verbatim for each layout
    and is now unified.

    :param draft: draft registration whose ``registration_metadata`` is scanned
    :raises Http404: for non-osfstorage providers or dangling file guids
    """
    data = draft.registration_metadata
    for q, question in get_file_questions('prereg-prize.json'):
        # Select whichever layout this question's answer uses; `extra` aliases
        # the list inside `data`, so in-place writes below persist correctly.
        if not isinstance(data[q]['value'], dict):
            extra = data[q]['extra']
        else:
            extra = data[q]['value']['uploader']['extra']
        for i, file_info in enumerate(extra):
            provider = file_info['data']['provider']
            if provider != 'osfstorage':
                raise Http404(
                    'File does not exist in OSFStorage ({}: {})'.format(
                        q, question))
            file_guid = file_info.get('fileId')
            if not file_guid:
                # No guid recorded yet: resolve the file, create a guid, and
                # write it back into the draft metadata.
                node = Node.load(file_info.get('nodeId'))
                path = file_info['data'].get('path')
                item = FileNode.resolve_class(provider, FileNode.FILE).get_or_create(
                    node, path)
                file_guid = item.get_guid(create=True)._id
                extra[i]['fileId'] = file_guid
                draft.update_metadata(data)
                draft.save()
            else:
                guid = Guid.load(file_guid)
                item = guid.referent
                if item is None:
                    raise Http404(
                        'File with guid "{}" in "{}" does not exist'.format(
                            file_guid, question))
            yield item
def setUp(self):
    """Seed four projects over three days and record statistics for the
    oldest day only, so tests can catch statistics up to ``self.time``."""
    super(TestMetricsGetOSFStatistics, self).setUp()
    Node.remove()  # start from an empty Node collection
    time_now = get_previous_midnight()
    NodeFactory(category="project", date_created=time_now)
    NodeFactory(category="project", date_created=time_now - timedelta(days=1))
    last_time = time_now - timedelta(days=2)
    NodeFactory(category="project", date_created=last_time)
    NodeFactory(category="project", date_created=last_time)
    get_days_statistics(last_time + timedelta(seconds=1))
    self.time = time_now + timedelta(seconds=1)
def setUp(self):
    """Seed four projects over three days and record statistics for the
    oldest day only, so tests can catch statistics up to ``self.time``."""
    super(TestMetricsGetOSFStatistics, self).setUp()
    Node.remove()  # start from an empty Node collection
    time_now = get_previous_midnight()
    NodeFactory(category='project', date_created=time_now)
    NodeFactory(category='project', date_created=time_now - timedelta(days=1))
    last_time = time_now - timedelta(days=2)
    NodeFactory(category='project', date_created=last_time)
    NodeFactory(category='project', date_created=last_time)
    get_days_statistics(last_time + timedelta(seconds=1))
    self.time = time_now + timedelta(seconds=1)
def get_target(self, node_id, target_id):
    """Resolve a comment target: the node itself, or a comment on it.

    Raises ValueError when the target is a different node, or when the
    target id matches neither the node nor any comment.
    """
    if target_id == node_id:
        return Node.load(node_id)
    # Target is some other node: commenting across nodes is forbidden.
    if Node.load(target_id):
        raise ValueError('Cannot post comment to another node.')
    comment = Comment.load(target_id)
    if comment:
        return comment
    raise ValueError
def _rejection_url_context(self, user_id):
    """Template context for a user's rejection link; None without a token."""
    approval_info = self.approval_state.get(user_id, {})
    rejection_token = approval_info.get('rejection_token')
    if rejection_token:
        from website.project.model import Node
        root_registration = Node.find_one(Q('retraction', 'eq', self))
        # Fall back to the root registration when no node id was stashed.
        target_id = approval_info.get('node_id', root_registration._id)
        registration = Node.load(target_id)
        return {
            'node_id': registration.registered_from._id,
            'token': rejection_token,
        }
def update_file_guid_referent(self, node, event_type, payload, user=None):
    """On a file move or rename, re-point affected file guids (and the
    comments rooted on them) at the file's new location.

    :param node: destination node of the event
    :param str event_type: 'addon_file_moved' or 'addon_file_renamed'
        (anything else is ignored)
    :param dict payload: waterbutler-style payload with 'source'/'destination'
    """
    if event_type == 'addon_file_moved' or event_type == 'addon_file_renamed':
        source = payload['source']
        destination = payload['destination']
        source_node = Node.load(source['node']['_id'])
        destination_node = node
        # osfstorage addresses files by path; other providers by materialized path.
        file_guids = FileNode.resolve_class(source['provider'], FileNode.ANY).get_file_guids(
            materialized_path=source['materialized'] if source['provider'] != 'osfstorage' else source['path'],
            provider=source['provider'],
            node=source_node)

        # Renames on id-based addons keep the file id, so guids stay valid.
        if event_type == 'addon_file_renamed' and source['provider'] in settings.ADDONS_BASED_ON_IDS:
            return
        # Same-provider, same-node moves on id-based addons likewise keep ids stable.
        if event_type == 'addon_file_moved' and (source['provider'] == destination['provider'] and
                                                 source['provider'] in settings.ADDONS_BASED_ON_IDS) and source_node == destination_node:
            return

        for guid in file_guids:
            obj = Guid.load(guid)
            # Move comment threads with the file when it changes nodes.
            if source_node != destination_node and Comment.find(Q('root_target', 'eq', guid)).count() != 0:
                update_comment_node(guid, source_node, destination_node)

            # Cross-provider (or non-osfstorage) moves require a fresh file
            # record; retire the old one unless it is already trashed.
            if source['provider'] != destination['provider'] or source['provider'] != 'osfstorage':
                old_file = FileNode.load(obj.referent._id)
                obj.referent = create_new_file(obj, source, destination, destination_node)
                obj.save()
                if old_file and not TrashedFileNode.load(old_file._id):
                    old_file.delete()
def get_nodes_with_oauth_grants(self, external_account):
    """Lazily yield each node holding an oauth grant for ``external_account``."""
    return (
        Node.load(node_id)
        for node_id, node_grants in self.oauth_grants.iteritems()
        if external_account._id in node_grants
    )
def get_node_title(self, obj):
    """Return the node's title, masked when the requester lacks read access."""
    requester = self.context['request'].user
    node = Node.load(obj['node']['_id'])
    if not node.has_permission(requester, osf_permissions.READ):
        return 'Private Component'
    return obj['node']['title']
def _email_template_context(self, user, node, is_authorizer=False, urls=None):
    """Build the email template context for a retraction notification.

    Authorizers additionally receive approval/disapproval links, the
    registration's title, and the approval window in hours.
    """
    urls = urls or self.stashed_urls.get(user._id, {})
    registration_link = urls.get('view', self._view_url(user._id, node))
    context = {
        'initiated_by': self.initiated_by.fullname,
        'registration_link': registration_link,
    }
    if is_authorizer:
        from website.project.model import Node
        registration = Node.find_one(Q('retraction', 'eq', self))
        context.update({
            'is_initiator': self.initiated_by == user,
            'project_name': registration.title,
            'approval_link': urls.get('approve', ''),
            'disapproval_link': urls.get('reject', ''),
            # Pending window expressed in hours for the template.
            'approval_time_span': settings.RETRACTION_PENDING_TIME.days * 24,
        })
    return context
def test_is_registration_approved(self):
    """is_registration_approved mirrors the sanction's approval state."""
    registration_approval = factories.RegistrationApprovalFactory()
    registration = Node.find_one(
        Q('registration_approval', 'eq', registration_approval))
    # Force the sanction to report approval and check the node-level flag.
    with mock.patch('website.project.sanctions.Sanction.is_approved', mock.Mock(return_value=True)):
        assert_true(registration.is_registration_approved)
def fix_wiki_titles(wiki_pages):
    """Strip '/' from wiki page names and re-key the owning node's wiki maps.

    Previously used an unused ``enumerate`` index and repeated the same
    rename logic for each of the three node-side mappings.

    :param wiki_pages: iterable of raw nodewikipage documents (dicts)
    """
    for wiki in wiki_pages:
        old_name = wiki['page_name']
        new_name = wiki['page_name'].replace('/', '')
        # update wiki page name
        db.nodewikipage.update({'_id': wiki['_id']}, {'$set': {
            'page_name': new_name
        }})
        logger.info('Updated wiki {} title to {}'.format(
            wiki['_id'], new_name))
        node = Node.load(wiki['node'])
        if not node:
            logger.info('Invalid node {} for wiki {}'.format(
                node, wiki['_id']))
            continue
        # Re-key every node-side wiki mapping from the old name to the new.
        for mapping in (node.wiki_pages_versions,
                        node.wiki_pages_current,
                        node.wiki_private_uuids):
            if old_name in mapping:
                mapping[new_name] = mapping.pop(old_name)
        node.save()
def get_public_projects(uid=None, user=None):
    """Render the given user's public top-level projects."""
    user = user or User.load(uid)
    # In future redesign, should be limited for users with many projects / components
    public_query = TOP_LEVEL_PROJECT_QUERY & Q('is_public', 'eq', True)
    matches = Node.find_for_user(user, subquery=public_query)
    return _render_nodes(list(matches))
def archive_success(dst_pk, job_pk):
    """Archiver's final callback: fix schema file references and start approval.

    Responses to filePicker schema questions (introduced for the Prereg
    Challenge) reference files on the registered_from node; after archival
    they are re-associated, via sha256 hashes, with the copies archived onto
    the new registration. utils.get_file_map memoizes its results and lazily
    walks child nodes, so the apparent repeat calls are cheap.

    :param str dst_pk: primary key of registration Node
    """
    create_app_context()
    dst = Node.load(dst_pk)
    for schema in dst.registered_schema:
        if not schema.has_files:
            continue
        utils.migrate_file_metadata(dst, schema)
    job = ArchiveJob.load(job_pk)
    if not job.sent:
        job.sent = True
        job.save()
    # Kick off the registration approval workflow for all contributors.
    dst.sanction.ask(
        dst.get_active_contributors_recursive(unique_users=True))
def get_paginated_response(self, data):
    """Add number of unread comments to links.meta when viewing list of comments
    filtered by a target node, file or wiki page."""
    response = super(CommentPagination, self).get_paginated_response(data)
    response_dict = response.data
    kwargs = self.request.parser_context['kwargs'].copy()
    if self.request.query_params.get('related_counts', False):
        target_id = self.request.query_params.get('filter[target]', None)
        node_id = kwargs.get('node_id', None)
        node = Node.load(node_id)
        user = self.request.user
        # Only contributors get an unread count, and only with a target filter.
        if target_id and not user.is_anonymous() and node.is_contributor(user):
            root_target = Guid.load(target_id)
            if root_target:
                page = getattr(root_target.referent, 'root_target_page', None)
                if page:
                    if not len(data):
                        unread = 0
                    else:
                        unread = Comment.find_n_unread(user=user, node=node, page=page, root_id=target_id)
                    # NOTE(review): lexicographic string comparison of versions;
                    # fine for single-digit minors ('2.0' < '2.1') but would
                    # misorder e.g. '2.10' — confirm against the API's
                    # versioning scheme.
                    if self.request.version < '2.1':
                        # Pre-2.1 clients expect unread under links.meta.
                        response_dict['links']['meta']['unread'] = unread
                    else:
                        response_dict['meta']['unread'] = unread
    return Response(response_dict)
def update(self, instance, validated_data):
    """Replace the node set of a view-only link.

    Removals and additions both require admin permission on the affected
    node; additions must also be descendants eligible for this link.
    """
    view_only_link = instance['self']
    user = self.context['request'].user
    add, remove = self.get_nodes_to_add_remove(
        nodes=instance['data'],
        new_nodes=validated_data['data'],
    )

    for node in remove:
        if not node.has_permission(user, 'admin'):
            raise PermissionDenied
        view_only_link.nodes.remove(node)
    view_only_link.save()

    current_nodes = [Node.load(each) for each in view_only_link.nodes]
    eligible_nodes = self.get_eligible_nodes(current_nodes)

    for node in add:
        if not node.has_permission(user, 'admin'):
            raise PermissionDenied
        if node not in eligible_nodes:
            raise NonDescendantNodeError(node_id=node._id)
        view_only_link.nodes.append(node)
    view_only_link.save()

    return self.make_instance_obj(view_only_link)
def archive_success(dst_pk, job_pk):
    """Archiver's final callback.

    Rewrites file references recorded in registration schema data (the
    Prereg Challenge filePicker feature): selections point at files on the
    registered_from node and are matched, by sha256, to the archived copies
    on this registration. utils.get_file_map memoizes and lazily walks child
    nodes, so repeated calls are efficient.

    :param str dst_pk: primary key of registration Node
    """
    create_app_context()
    registration = Node.load(dst_pk)
    # Migrate file metadata for every schema that recorded file selections.
    for schema in registration.registered_schema:
        if schema.has_files:
            utils.migrate_file_metadata(registration, schema)
    job = ArchiveJob.load(job_pk)
    if not job.sent:
        job.sent = True
        job.save()
    contributors = registration.get_active_contributors_recursive(unique_users=True)
    registration.sanction.ask(contributors)
def on_delete(self):
    """Clear node-side auth on every node granted access via this account."""
    super(AddonOAuthUserSettingsBase, self).on_delete()
    granted_nodes = [Node.load(grant_id) for grant_id in self.oauth_grants.keys()]
    for granted_node in granted_nodes:
        addon = granted_node.get_addon(self.oauth_provider.short_name)
        if not addon:
            continue
        if addon.user_settings == self:
            addon.clear_auth()
def get_paginated_response(self, data):
    """Add number of unread comments to links.meta when viewing list of comments
    filtered by a target node, file or wiki page."""
    response = super(CommentPagination, self).get_paginated_response(data)
    response_dict = response.data
    kwargs = self.request.parser_context['kwargs'].copy()
    if self.request.query_params.get('related_counts', False):
        target_id = self.request.query_params.get('filter[target]', None)
        node_id = kwargs.get('node_id', None)
        node = Node.load(node_id)
        user = self.request.user
        # Only contributors get an unread count, and only with a target filter.
        if target_id and not user.is_anonymous() and node.is_contributor(
                user):
            root_target = Guid.load(target_id)
            if root_target:
                page = getattr(root_target.referent, 'root_target_page', None)
                if page:
                    if not len(data):
                        unread = 0
                    else:
                        unread = Comment.find_n_unread(user=user, node=node, page=page, root_id=target_id)
                    response_dict['links']['meta']['unread'] = unread
    return Response(response_dict)
def do_migration():
    """Hide duplicate ``preprint_license_updated`` logs that were recorded
    within 60 seconds of each other on the same node.

    Fixes: the local variable ``timedelta`` shadowed the stdlib
    ``datetime.timedelta`` class, and the node-discovery expression was an
    unreadable nested one-liner; both are cleaned up without changing
    behavior.
    """
    license_log_query = Q('action', 'eq', 'preprint_license_updated')
    # Nodes that have more than one preprint_license_updated log.
    candidate_ids = list(set(
        log.node._id for log in NodeLog.find(license_log_query)
    ))
    dupe_nodes = [
        node for node in Node.find(Q('_id', 'in', candidate_ids))
        if NodeLog.find(license_log_query & Q('node', 'eq', node._id)).count() > 1
    ]
    logger.info('Found {} nodes with multiple preprint_license_updated logs'.format(len(dupe_nodes)))
    for node in dupe_nodes:
        license_logs = [log for log in node.logs if log.action == 'preprint_license_updated']
        # Walk the logs newest-to-oldest, comparing each pair of neighbors.
        log = license_logs.pop()
        while license_logs:
            next_log = license_logs.pop()
            # NOTE(review): .seconds is only the seconds component of the
            # delta (wraps for deltas >= 1 day); preserved as-is from the
            # original — confirm whether .total_seconds() was intended.
            delta = log.date - next_log.date
            if delta.seconds < 60:
                logger.info(
                    'Hiding duplicate preprint_license_updated log with ID {} from node {}, timedelta was {}'.format(
                        log._id, node._id, delta
                    )
                )
                log.should_hide = True
                log.save()
            else:
                logger.info(
                    'Skipping preprint_license_updated log with ID {} from node {}, timedelta was {}'.format(
                        log._id, node._id, delta
                    )
                )
            log = next_log
def main(send_email=False):
    """Audit OSFStorage usage for top-level nodes and their contributors,
    logging (and optionally emailing) everyone over the configured limits.

    :param bool send_email: when True, mail the offending lines; otherwise
        only log them.
    """
    logger.info('Starting Project storage audit')
    init_app(set_backends=True, routes=False)
    lines = []
    projects = {}
    # user -> (bytes_used, bytes_deleted)
    users = defaultdict(lambda: (0, 0))

    for node in Node.find(Q('__backrefs.parent.node.nodes', 'eq', None)):  # ODM hack to ignore all nodes with parents
        if node._id in WHITE_LIST:
            continue  # Dont count whitelisted nodes against users
        projects[node] = get_usage(node)
        for contrib in node.contributors:
            if node.can_edit(user=contrib):
                # Adds tuples together, map(sum, zip((a, b), (c, d))) -> (a+c, b+d)
                users[contrib] = tuple(map(sum, zip(users[contrib], projects[node])))

    for collection, limit in ((users, USER_LIMIT), (projects, PROJECT_LIMIT)):
        for item, (used, deleted) in filter(functools.partial(limit_filter, limit), collection.items()):
            line = '{!r} has exceeded the limit {:.2f}GBs ({}b) with {:.2f}GBs ({}b) used and {:.2f}GBs ({}b) deleted.'.format(
                item, limit / GBs, limit, used / GBs, used, deleted / GBs, deleted)
            logger.info(line)
            lines.append(line)

    if lines:
        if send_email:
            logger.info('Sending email...')
            mails.send_mail('*****@*****.**', mails.EMPTY, body='\n'.join(lines), subject='Script: OsfStorage usage audit')
        else:
            # Fixed: previous code called .format(len(lines)) on a
            # placeholder-free string — a no-op that obscured intent.
            logger.info('send_email is False, not sending email')
        logger.info('{} offending project(s) and user(s) found'.format(len(lines)))
    else:
        logger.info('No offending projects or users found')
def _view_url_context(self, user_id, node):
    """Template context for viewing the registration under retraction."""
    from website.project.model import Node
    retracted_registration = Node.find_one(Q('retraction', 'eq', self))
    return {'node_id': retracted_registration._id}
def fix_wiki_titles(wiki_pages):
    """Strip '/' from wiki page names and re-key the owning node's wiki maps.

    Previously used an unused ``enumerate`` index and repeated the same
    rename logic for each of the three node-side mappings.

    :param wiki_pages: iterable of raw nodewikipage documents (dicts)
    """
    for wiki in wiki_pages:
        old_name = wiki["page_name"]
        new_name = wiki["page_name"].replace("/", "")
        # update wiki page name
        db.nodewikipage.update({"_id": wiki["_id"]}, {"$set": {"page_name": new_name}})
        logger.info("Updated wiki {} title to {}".format(wiki["_id"], new_name))
        node = Node.load(wiki["node"])
        if not node:
            logger.info("Invalid node {} for wiki {}".format(node, wiki["_id"]))
            continue
        # Re-key every node-side wiki mapping from the old name to the new.
        for mapping in (node.wiki_pages_versions,
                        node.wiki_pages_current,
                        node.wiki_private_uuids):
            if old_name in mapping:
                mapping[new_name] = mapping.pop(old_name)
        node.save()
def test_sanction_embargo_termination_first(self):
    """Node.sanction resolves to the embargo termination approval."""
    embargo_termination_approval = factories.EmbargoTerminationApprovalFactory(
    )
    registration = Node.find_one(
        Q('embargo_termination_approval', 'eq', embargo_termination_approval))
    assert_equal(registration.sanction, embargo_termination_approval)
def _on_complete(self, user):
    """Finalize an approved retraction: log it on the parent project,
    cancel any associated embargo, and make the registration tree public."""
    from website.project.model import Node, NodeLog
    parent_registration = Node.find_one(Q('retraction', 'eq', self))
    parent_registration.registered_from.add_log(
        action=NodeLog.RETRACTION_APPROVED,
        params={
            'node': parent_registration.registered_from_id,
            'retraction_id': self._id,
            'registration': parent_registration._id
        },
        auth=Auth(self.initiated_by),
    )
    # Remove any embargoes associated with the registration
    if parent_registration.embargo_end_date or parent_registration.is_pending_embargo:
        parent_registration.embargo.state = self.REJECTED
        parent_registration.registered_from.add_log(
            action=NodeLog.EMBARGO_CANCELLED,
            params={
                'node': parent_registration.registered_from_id,
                'registration': parent_registration._id,
                'embargo_id': parent_registration.embargo._id,
            },
            auth=Auth(self.initiated_by),
        )
        parent_registration.embargo.save()
    # Ensure retracted registration is public
    # Pass auth=None because the registration initiator may not be
    # an admin on components (component admins had the opportunity
    # to disapprove the retraction by this point)
    for node in parent_registration.node_and_primary_descendants():
        node.set_privacy('public', auth=None, save=True, log=False)
        node.update_search()
def _on_complete(self, user):
    """Finalize an approved retraction: log it on the parent project,
    cancel any associated embargo, and make the registration tree public."""
    from website.project.model import Node, NodeLog
    parent_registration = Node.find_one(Q('retraction', 'eq', self))
    parent_registration.registered_from.add_log(
        action=NodeLog.RETRACTION_APPROVED,
        params={
            'node': parent_registration._id,
            'retraction_id': self._id,
        },
        auth=Auth(self.initiated_by),
    )
    # Remove any embargoes associated with the registration
    if parent_registration.embargo_end_date or parent_registration.is_pending_embargo:
        parent_registration.embargo.state = self.REJECTED
        parent_registration.registered_from.add_log(
            action=NodeLog.EMBARGO_CANCELLED,
            params={
                'node': parent_registration._id,
                'embargo_id': parent_registration.embargo._id,
            },
            auth=Auth(self.initiated_by),
        )
        parent_registration.embargo.save()
    # Ensure retracted registration is public
    # Pass auth=None because the registration initiator may not be
    # an admin on components (component admins had the opportunity
    # to disapprove the retraction by this point)
    for node in parent_registration.node_and_primary_descendants():
        node.set_privacy('public', auth=None, save=True, log=False)
        node.update_search()
def update(self, instance, validated_data):
    """Replace the node set of a view-only link.

    Both removals and additions require admin permission on the node;
    additions must also be eligible descendants for this link.
    """
    view_only_link = instance['self']
    requester = self.context['request'].user
    add, remove = self.get_nodes_to_add_remove(
        nodes=instance['data'],
        new_nodes=validated_data['data'],
    )

    # Detach nodes the requester administers.
    for node in remove:
        if not node.has_permission(requester, 'admin'):
            raise PermissionDenied
        view_only_link.nodes.remove(node)
    view_only_link.save()

    loaded = [Node.load(pk) for pk in view_only_link.nodes]
    eligible_nodes = self.get_eligible_nodes(loaded)

    # Attach new nodes, enforcing admin rights and descendancy.
    for node in add:
        if not node.has_permission(requester, 'admin'):
            raise PermissionDenied
        if node not in eligible_nodes:
            raise NonDescendantNodeError(node_id=node._id)
        view_only_link.nodes.append(node)
    view_only_link.save()

    return self.make_instance_obj(view_only_link)
def get_projects(user):
    """Return a list of user's projects, excluding registrations and folders."""
    project_query = (
        Q('category', 'eq', 'project') &
        Q('is_registration', 'eq', False) &
        Q('is_deleted', 'eq', False) &
        Q('is_folder', 'eq', False)
    )
    return list(Node.find_for_user(user, project_query))
def dropbox_oauth_finish(auth, **kwargs):
    """View called when the Oauth flow is completed. Adds a new DropboxUserSettings
    record to the user and saves the user's access token and account info.

    :raises HTTPError: 403 when the requester is not logged in
    """
    if not auth.logged_in:
        raise HTTPError(http.FORBIDDEN)
    user = auth.user
    # Node id was stashed in the session if the flow started from a node
    # settings page; None for a user-level authorization.
    node = Node.load(session.data.get('dropbox_auth_nid'))
    result = finish_auth(node)
    # If result is a redirect response, follow the redirect
    if isinstance(result, BaseResponse):
        return result
    # Make sure user has dropbox enabled
    user.add_addon('dropbox')
    user.save()
    user_settings = user.get_addon('dropbox')
    user_settings.owner = user
    user_settings.access_token = result.access_token
    user_settings.dropbox_id = result.dropbox_id
    client = get_client_from_user_settings(user_settings)
    user_settings.dropbox_info = client.account_info()
    user_settings.save()
    if node:
        del session.data['dropbox_auth_nid']
        # Automatically use newly-created auth
        if node.has_addon('dropbox'):
            node_addon = node.get_addon('dropbox')
            node_addon.set_user_auth(user_settings)
            node_addon.save()
        return redirect(node.web_url_for('node_setting'))
    return redirect(web_url_for('user_addons'))
def menbib_oauth_finish(**kwargs):
    """Complete the Mendeley OAuth flow: store tokens on the user's addon
    settings and, when started from a node, wire the node addon to them.

    :raises HTTPError: 403 when no user is logged in
    """
    user = get_current_user()
    if not user:
        raise HTTPError(http.FORBIDDEN)
    # Node id was stashed in the session if the flow started from a node
    # settings page; None for a user-level authorization.
    node = Node.load(session.data.get('menbib_auth_nid'))
    result = finish_auth()
    user.add_addon('menbib')
    user.save()
    user_settings = user.get_addon('menbib')
    user_settings.owner = user
    user_settings.access_token = result.access_token
    user_settings.refresh_token = result.refresh_token
    user_settings.token_type = result.token_type
    user_settings.expires_in = result.expires_in
    user_settings.save()
    flash('Successfully authorized Mendeley', 'success')
    if node:
        del session.data['menbib_auth_nid']
        # Automatically use the newly-created auth on the originating node.
        if node.has_addon('menbib'):
            node_addon = node.get_addon('menbib')
            node_addon.set_user_auth(user_settings)
            node_addon.save()
        return redirect(node.web_url_for('node_setting'))
    return redirect(web_url_for('user_addons'))
def get_public_projects(user):
    """Return a list of a user's public projects."""
    # Avoid circular import
    from website.project.utils import TOP_LEVEL_PROJECT_QUERY
    public_top_level = Q('is_public', 'eq', True) & TOP_LEVEL_PROJECT_QUERY
    return Node.find_for_user(user, subquery=public_top_level)
def get_projects(user):
    """Return a list of user's projects, excluding registrations and folders."""
    # Note: If the user is a contributor to a child (but does not have access
    # to the parent), the child is excluded from this view.
    # Avoid circular import
    from website.project.utils import TOP_LEVEL_PROJECT_QUERY
    subquery = TOP_LEVEL_PROJECT_QUERY
    return Node.find_for_user(user, subquery=subquery)
def get_public_components(uid=None, user=None):
    """Render the given user's public components (non-top-level nodes)."""
    user = user or User.load(uid)
    # TODO: This should use User.visible_contributor_to?
    # In future redesign, should be limited for users with many projects / components
    component_query = (
        PROJECT_QUERY &
        Q("parent_node", "ne", None) &
        Q("is_public", "eq", True)
    )
    nodes = list(Node.find_for_user(user, subquery=component_query))
    return _render_nodes(nodes, show_path=True)
def test_delete_registration_tree(self):
    """delete_registration_tree marks the registration and every descendant
    registration node as deleted."""
    proj = factories.NodeFactory()
    factories.NodeFactory(parent=proj)
    comp2 = factories.NodeFactory(parent=proj)
    factories.NodeFactory(parent=comp2)
    reg = factories.RegistrationFactory(project=proj)
    reg_ids = [reg._id] + [r._id for r in reg.get_descendants_recursive()]
    archiver_utils.delete_registration_tree(reg)
    # No node in the registration tree should remain undeleted.
    assert_false(Node.find(Q('_id', 'in', reg_ids) &
                           Q('is_deleted', 'eq', False)).count())
def on_delete(self):
    """When the user deactivates the addon, clear auth for connected nodes.
    """
    super(AddonOAuthUserSettingsBase, self).on_delete()
    connected = [Node.load(each) for each in self.oauth_grants.keys()]
    for connected_node in connected:
        node_addon = connected_node.get_addon(self.oauth_provider.short_name)
        # Only clear settings that actually point back at this user record.
        if node_addon and node_addon.user_settings == self:
            node_addon.clear_auth()
def box_oauth_finish(auth, **kwargs):
    """View called when the Oauth flow is completed. Adds a new BoxUserSettings
    record to the user and saves the user's access token and account info.
    """
    user = auth.user
    # Node id was stashed in the session if the flow started from a node
    # settings page; pop it so the flow cannot be replayed.
    node = Node.load(session.data.pop('box_auth_nid', None))

    # Handle request cancellations from Box's API
    if request.args.get('error'):
        flash('Box authorization request cancelled.')
        if node:
            return redirect(node.web_url_for('node_setting'))
        return redirect(web_url_for('user_addons'))

    result = finish_auth()
    # If result is a redirect response, follow the redirect
    if isinstance(result, BaseResponse):
        return result

    client = BoxClient(CredentialsV2(
        result['access_token'],
        result['refresh_token'],
        settings.BOX_KEY,
        settings.BOX_SECRET,
    ))
    about = client.get_user_info()
    # Reuse the per-Box-account OAuth settings record if one exists.
    oauth_settings = BoxOAuthSettings.load(about['id'])
    if not oauth_settings:
        oauth_settings = BoxOAuthSettings(user_id=about['id'], username=about['name'])
        oauth_settings.save()
    oauth_settings.refresh_token = result['refresh_token']
    oauth_settings.access_token = result['access_token']
    # NOTE(review): assumes a fixed one-hour token lifetime — confirm
    # against Box's token response, which reports expires_in.
    oauth_settings.expires_at = datetime.utcfromtimestamp(time.time() + 3600)

    # Make sure user has box enabled
    user.add_addon('box')
    user.save()
    user_settings = user.get_addon('box')
    user_settings.oauth_settings = oauth_settings
    user_settings.save()
    flash('Successfully authorized Box', 'success')
    if node:
        # Automatically use newly-created auth
        if node.has_addon('box'):
            node_addon = node.get_addon('box')
            node_addon.set_user_auth(user_settings)
            node_addon.save()
        return redirect(node.web_url_for('node_setting'))
    return redirect(web_url_for('user_addons'))
def get_public_projects(uid=None, user=None):
    """Render the given user's public, non-deleted, non-registration projects."""
    user = user or User.load(uid)
    public_project_query = (
        Q('category', 'eq', 'project') &
        Q('is_public', 'eq', True) &
        Q('is_registration', 'eq', False) &
        Q('is_deleted', 'eq', False)
    )
    matches = Node.find_for_user(user, subquery=public_project_query)
    return _render_nodes(list(matches))
def migrate_nodes():
    """Run the category migration over every node, saving and counting the
    nodes that actually changed."""
    migrated_count = 0
    for node in Node.find():
        if not migrate_category(node):
            continue
        node.save()
        logger.info('Migrated {0}'.format(node._id))
        migrated_count += 1
    logger.info('Finished migrating {0} nodes.'.format(migrated_count))