def test_request_embargo_termination_not_embargoed(self, user, not_embargoed):
    """Requesting embargo termination on a non-embargoed node raises NodeStateError."""
    auth = Auth(user)
    with pytest.raises(NodeStateError):
        not_embargoed.request_embargo_termination(auth)
def after_delete(self, node, user):
    """Node-deletion hook: revoke this addon's authorization, logging and saving."""
    acting_auth = Auth(user=user)
    self.deauthorize(acting_auth, log=True, save=True)
def test_fork_by_authorizer(self, mock_push_status):
    """Forking by the authorizing user keeps the addon authorized on the fork."""
    forked = self.node.fork_node(auth=Auth(user=self.node.creator))
    self.user_settings.reload()
    fork_addon = forked.get_addon(self.short_name)
    assert_true(fork_addon.has_auth)
    assert_true(self.user_settings.verify_oauth_access(forked, self.external_account))
def test_format_preprint_nones(self):
    """Verify format_preprint output when tags, publish date, DOI, and subjects are unset."""
    # Blank out the optional fields so the formatter has nothing to serialize for them.
    self.preprint.node.tags = []
    self.preprint.date_published = None
    self.preprint.node.preprint_article_doi = None
    self.preprint.set_subjects([], auth=Auth(self.preprint.node.creator), save=False)
    res = format_preprint(self.preprint)
    assert self.preprint.provider != 'osf'
    # The emitted graph must contain exactly these node types.
    assert set(gn['@type'] for gn in res) == {
        'creator', 'contributor', 'workidentifier', 'agentidentifier', 'person', 'preprint'
    }
    # Index the graph nodes, then pop each expected group out; anything left
    # over at the end is an unexpected node.
    nodes = dict(enumerate(res))
    preprint = nodes.pop(
        next(k for k, v in nodes.items() if v['@type'] == 'preprint'))
    assert preprint['title'] == self.preprint.node.title
    assert preprint['description'] == self.preprint.node.description
    assert preprint['is_deleted'] == (
        not self.preprint.is_published or
        not self.preprint.node.is_public or
        self.preprint.node.is_preprint_orphan)
    assert preprint[
        'date_updated'] == self.preprint.date_modified.isoformat()
    # date_published was cleared above, so it must not be serialized.
    assert preprint.get('date_published') is None
    people = sorted(
        [nodes.pop(k) for k, v in nodes.items() if v['@type'] == 'person'],
        key=lambda x: x['given_name'])
    expected_people = sorted(
        [{
            '@type': 'person',
            'given_name': u'BoJack',
            'family_name': u'Horseman',
        }, {
            '@type': 'person',
            'given_name': self.user.given_name,
            'family_name': self.user.family_name,
        }, {
            '@type': 'person',
            'given_name': self.preprint.node.creator.given_name,
            'family_name': self.preprint.node.creator.family_name,
        }],
        key=lambda x: x['given_name'])
    # @ids are generated by the formatter, so copy them from the actual
    # output before comparing.
    for i, p in enumerate(expected_people):
        expected_people[i]['@id'] = people[i]['@id']
    assert people == expected_people
    creators = sorted([
        nodes.pop(k) for k, v in nodes.items() if v['@type'] == 'creator'
    ], key=lambda x: x['order_cited'])
    assert creators == [{
        '@id': creators[0]['@id'],
        '@type': 'creator',
        'order_cited': 0,
        'cited_as': self.preprint.node.creator.fullname,
        'agent': {
            '@id': [
                p['@id'] for p in people
                if p['given_name'] == self.preprint.node.creator.given_name
            ][0],
            '@type': 'person'
        },
        'creative_work': {
            '@id': preprint['@id'],
            '@type': preprint['@type']
        },
    }, {
        '@id': creators[1]['@id'],
        '@type': 'creator',
        'order_cited': 1,
        'cited_as': u'BoJack Horseman',
        'agent': {
            '@id': [p['@id'] for p in people if p['given_name'] == u'BoJack'][0],
            '@type': 'person'
        },
        'creative_work': {
            '@id': preprint['@id'],
            '@type': preprint['@type']
        },
    }]
    contributors = [
        nodes.pop(k) for k, v in nodes.items() if v['@type'] == 'contributor'
    ]
    assert contributors == [{
        '@id': contributors[0]['@id'],
        '@type': 'contributor',
        'cited_as': self.user.fullname,
        'agent': {
            '@id': [
                p['@id'] for p in people
                if p['given_name'] == self.user.given_name
            ][0],
            '@type': 'person'
        },
        'creative_work': {
            '@id': preprint['@id'],
            '@type': preprint['@type']
        },
    }]
    # Agent identifiers: mailto + profile-image URLs for both users, plus a
    # profile URL for every registered contributor.
    agentidentifiers = {
        nodes.pop(k)['uri']
        for k, v in nodes.items() if v['@type'] == 'agentidentifier'
    }
    assert agentidentifiers == set([
        'mailto:' + self.user.username,
        'mailto:' + self.preprint.node.creator.username,
        self.user.profile_image_url(),
        self.preprint.node.creator.profile_image_url(),
    ]) | set(
        urlparse.urljoin(settings.DOMAIN, user.profile_url)
        for user in self.preprint.node.contributors if user.is_registered)
    workidentifiers = {
        nodes.pop(k)['uri']
        for k, v in nodes.items() if v['@type'] == 'workidentifier'
    }
    # URLs should *always* be osf.io/guid/
    assert workidentifiers == set(
        [urlparse.urljoin(settings.DOMAIN, self.preprint._id) + '/'])
    # Every node in the graph has now been consumed.
    assert nodes == {}
def main_task(osf_cookie, data, request_info):
    '''
    Creates a temporary folder to download the files from the FTP server,
    downloads from it, uploads to the selected storage and deletes the
    temporary files.
    '''
    try:
        tmp_path = create_tmp_folder(request_info['uid'])
        if not tmp_path:
            raise OSError('Could not create temporary folder.')
        try:
            # Protocol-specific download (keyed by data['protocol']).
            downloaded = DOWNLOAD_FUNCTIONS[data['protocol']](tmp_path, data)
            if not downloaded:
                raise RuntimeError('Could not download the file(s) from the FTP server.')
        except IOError:
            raise RuntimeError('Could not download the file(s) from the FTP server.')
        # Default upload target is osfstorage unless a folder id was given.
        dest_path = 'osfstorage/' if 'destFolderId' not in data else data['destFolderId']
        uploaded = waterbutler.upload_folder_recursive(osf_cookie, data['destPid'], tmp_path, dest_path)
        shutil.rmtree(tmp_path)
        # Variables for logging into recent activity
        node = AbstractNode.load(request_info['node_id'])
        user = OSFUser.load(request_info['uid'])
        auth = Auth(user=user)
        if uploaded['fail_file'] > 0 or uploaded['fail_folder'] > 0:
            # Recent activity log
            node.add_log(
                action='ftp_upload_fail',
                params={
                    'node': request_info['node_id'],
                    'project': request_info['pid'],
                    'filecount': uploaded['fail_file'],
                    'foldercount': uploaded['fail_folder']
                },
                auth=auth
            )
            # Exception: summarize which counts failed in a human-readable message.
            fails = []
            if uploaded['fail_file'] > 0:
                fails.append('%s file(s)' % uploaded['fail_file'])
            if uploaded['fail_folder'] > 0:
                fails.append('%s folder(s)' % uploaded['fail_folder'])
            message = 'Failed to upload %s to storage.' % (
                ' and '.join(fails)
            )
            raise RuntimeError(message)
        node.add_log(
            action='ftp_upload_success',
            params={
                'node': request_info['node_id'],
                'project': request_info['pid'],
            },
            auth=auth
        )
    except SoftTimeLimitExceeded:
        # Celery soft time limit hit: clean up but do not re-raise.
        # NOTE(review): control then falls through to `return True`, so a
        # timed-out run reports success — confirm this is intended.
        tmp_path = tmp_path if 'tmp_path' in locals() else None
        fail_cleanup(tmp_path)
    except Exception:
        # Any other failure: clean up the temp folder, then propagate.
        tmp_path = tmp_path if 'tmp_path' in locals() else None
        fail_cleanup(tmp_path)
        raise
    return True
def test_must_have_addon_user_false(self, mock_current_user):
    """must_have_addon('github', 'user') raises HTTPError after the addon is removed."""
    mock_current_user.return_value = Auth(self.project.creator)
    self.project.creator.delete_addon('github')
    view = must_have_addon('github', 'user')(needs_addon_view)
    with assert_raises(HTTPError):
        view()
def create_waterbutler_log(payload, **kwargs):
    """Record a waterbutler file action as a log entry on a node.

    Validates the payload, maps the action to a NodeLog action (a move within
    the same folder/provider/node that only changes the name is re-labelled a
    rename), records the log, emails the user on move/copy (on request or on
    failure), and fires the file_updated signal.

    :param dict payload: waterbutler callback payload containing 'auth',
        'action', and action-specific keys ('source'/'destination' for
        moves/copies, 'metadata'/'provider' otherwise)
    :raises: HTTPError(400) when required payload keys are missing or invalid
    :return: {'status': 'success'}
    """
    with TokuTransaction():
        try:
            auth = payload['auth']
            action = LOG_ACTION_MAP[payload['action']]
        except KeyError:
            raise HTTPError(httplib.BAD_REQUEST)
        user = User.load(auth['id'])
        if user is None:
            raise HTTPError(httplib.BAD_REQUEST)
        auth = Auth(user=user)
        node = kwargs['node'] or kwargs['project']
        if action in (NodeLog.FILE_MOVED, NodeLog.FILE_COPIED):
            # Both bundles must carry the full set of file descriptors.
            for bundle in ('source', 'destination'):
                for key in ('provider', 'materialized', 'name', 'nid'):
                    if key not in payload[bundle]:
                        raise HTTPError(httplib.BAD_REQUEST)
            dest = payload['destination']
            src = payload['source']
            if src is not None and dest is not None:
                dest_path = dest['materialized']
                src_path = src['materialized']
                if dest_path.endswith('/') and src_path.endswith('/'):
                    dest_path = os.path.dirname(dest_path)
                    src_path = os.path.dirname(src_path)
                # Same parent folder, provider, and node with a different
                # name: this "move" is really a rename.
                if (os.path.split(dest_path)[0] == os.path.split(src_path)[0]
                        and dest['provider'] == src['provider']
                        and dest['nid'] == src['nid']
                        and dest['name'] != src['name']):
                    action = LOG_ACTION_MAP['rename']
            destination_node = node  # For clarity
            source_node = Node.load(payload['source']['nid'])
            source = source_node.get_addon(payload['source']['provider'])
            destination = node.get_addon(payload['destination']['provider'])
            # Enrich both bundles with addon names, view URLs, and node info
            # so the log params are self-contained.
            payload['source'].update({
                'materialized': payload['source']['materialized'].lstrip('/'),
                'addon': source.config.full_name,
                'url': source_node.web_url_for(
                    'addon_view_or_download_file',
                    path=payload['source']['path'].lstrip('/'),
                    provider=payload['source']['provider']),
                'node': {
                    '_id': source_node._id,
                    'url': source_node.url,
                    'title': source_node.title,
                }
            })
            payload['destination'].update({
                'materialized': payload['destination']['materialized'].lstrip('/'),
                'addon': destination.config.full_name,
                'url': destination_node.web_url_for(
                    'addon_view_or_download_file',
                    path=payload['destination']['path'].lstrip('/'),
                    provider=payload['destination']['provider']),
                'node': {
                    '_id': destination_node._id,
                    'url': destination_node.url,
                    'title': destination_node.title,
                }
            })
            payload.update({
                'node': destination_node._id,
                'project': destination_node.parent_id,
            })
            if not payload.get('errors'):
                destination_node.add_log(action=action, auth=auth, params=payload)
            if payload.get('email') is True or payload.get('errors'):
                mails.send_mail(
                    user.username,
                    mails.FILE_OPERATION_FAILED if payload.get('errors')
                    else mails.FILE_OPERATION_SUCCESS,
                    action=payload['action'],
                    source_node=source_node,
                    destination_node=destination_node,
                    source_path=payload['source']['materialized'],
                    # FIX: the destination path must come from the destination
                    # bundle (was payload['source']['materialized']).
                    destination_path=payload['destination']['materialized'],
                    source_addon=payload['source']['addon'],
                    destination_addon=payload['destination']['addon'],
                )
            # FIX: the payload carries 'errors' (checked above), not 'error';
            # the old 'error' lookup never matched, so failed actions still
            # fired file_signals below.
            if payload.get('errors'):
                # Action failed but our function succeeded
                # Bail out to avoid file_signals
                return {'status': 'success'}
        else:
            try:
                metadata = payload['metadata']
                node_addon = node.get_addon(payload['provider'])
            except KeyError:
                raise HTTPError(httplib.BAD_REQUEST)
            if node_addon is None:
                raise HTTPError(httplib.BAD_REQUEST)
            metadata['path'] = metadata['path'].lstrip('/')
            node_addon.create_waterbutler_log(auth, action, metadata)
    # Signal outside the logging transaction so listeners see committed state.
    with TokuTransaction():
        file_signals.file_updated.send(node=node, user=user, event_type=action, payload=payload)
    return {'status': 'success'}
def create_waterbutler_log(payload, **kwargs):
    """Record a waterbutler file action on a node or preprint.

    Skips download actions entirely, relabels same-folder moves as renames,
    adds a log entry (with email notification for moves/copies), updates
    storage usage, and fires the file_updated signal.

    :param dict payload: waterbutler callback payload ('auth', 'action', plus
        'source'/'destination' bundles or 'metadata')
    :raises: HTTPError(400) on missing/invalid payload keys or unknown user
    :return: {'status': 'success'}
    """
    with transaction.atomic():
        try:
            auth = payload['auth']
            # Don't log download actions
            if payload['action'] in DOWNLOAD_ACTIONS:
                guid = Guid.load(payload['metadata'].get('nid'))
                if guid:
                    node = guid.referent
                return {'status': 'success'}
            user = OSFUser.load(auth['id'])
            if user is None:
                raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
            action = LOG_ACTION_MAP[payload['action']]
        except KeyError:
            raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
        auth = Auth(user=user)
        # The target may arrive as a node/project kwarg or as a preprint guid.
        node = kwargs.get('node') or kwargs.get('project') or Preprint.load(kwargs.get('nid')) or Preprint.load(kwargs.get('pid'))
        if action in (NodeLog.FILE_MOVED, NodeLog.FILE_COPIED):
            # Both bundles must carry the full set of file descriptors.
            for bundle in ('source', 'destination'):
                for key in ('provider', 'materialized', 'name', 'nid'):
                    if key not in payload[bundle]:
                        raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
            dest = payload['destination']
            src = payload['source']
            if src is not None and dest is not None:
                dest_path = dest['materialized']
                src_path = src['materialized']
                if dest_path.endswith('/') and src_path.endswith('/'):
                    dest_path = os.path.dirname(dest_path)
                    src_path = os.path.dirname(src_path)
                # Same parent folder, provider, and node with a changed name:
                # reclassify the move as a rename.
                if (
                    os.path.split(dest_path)[0] == os.path.split(src_path)[0] and
                    dest['provider'] == src['provider'] and
                    dest['nid'] == src['nid'] and
                    dest['name'] != src['name']
                ):
                    action = LOG_ACTION_MAP['rename']
            destination_node = node  # For clarity
            source_node = AbstractNode.load(src['nid']) or Preprint.load(src['nid'])
            # We return provider fullname so we need to load node settings, if applicable
            source = None
            if hasattr(source_node, 'get_addon'):
                source = source_node.get_addon(payload['source']['provider'])
            destination = None
            if hasattr(node, 'get_addon'):
                destination = node.get_addon(payload['destination']['provider'])
            # Enrich both bundles with addon names, view URLs, and node info
            # so the log params are self-contained.
            payload['source'].update({
                'materialized': payload['source']['materialized'].lstrip('/'),
                'addon': source.config.full_name if source else 'osfstorage',
                'url': source_node.web_url_for(
                    'addon_view_or_download_file',
                    path=payload['source']['path'].lstrip('/'),
                    provider=payload['source']['provider']
                ),
                'node': {
                    '_id': source_node._id,
                    'url': source_node.url,
                    'title': source_node.title,
                }
            })
            payload['destination'].update({
                'materialized': payload['destination']['materialized'].lstrip('/'),
                'addon': destination.config.full_name if destination else 'osfstorage',
                'url': destination_node.web_url_for(
                    'addon_view_or_download_file',
                    path=payload['destination']['path'].lstrip('/'),
                    provider=payload['destination']['provider']
                ),
                'node': {
                    '_id': destination_node._id,
                    'url': destination_node.url,
                    'title': destination_node.title,
                }
            })
            if not payload.get('errors'):
                destination_node.add_log(
                    action=action,
                    auth=auth,
                    params=payload
                )
            if payload.get('email') is True or payload.get('errors'):
                mails.send_mail(
                    user.username,
                    mails.FILE_OPERATION_FAILED if payload.get('errors')
                    else mails.FILE_OPERATION_SUCCESS,
                    action=payload['action'],
                    source_node=source_node,
                    destination_node=destination_node,
                    source_path=payload['source']['materialized'],
                    source_addon=payload['source']['addon'],
                    destination_addon=payload['destination']['addon'],
                    osf_support_email=settings.OSF_SUPPORT_EMAIL
                )
            if payload.get('errors'):
                # Action failed but our function succeeded
                # Bail out to avoid file_signals
                return {'status': 'success'}
        else:
            node.create_waterbutler_log(auth, action, payload)
    metadata = payload.get('metadata') or payload.get('destination')
    target_node = AbstractNode.load(metadata.get('nid'))
    if target_node and not target_node.is_quickfiles and payload['action'] != 'download_file':
        update_storage_usage_with_size(payload)
    # Signal in its own transaction so listeners observe committed state.
    with transaction.atomic():
        file_signals.file_updated.send(target=node, user=user, event_type=action, payload=payload)
    return {'status': 'success'}
def _on_complete(self, user=None):
    """Finish the approval: run the base hook, then lift the registration's embargo."""
    super(EmbargoTerminationApproval, self)._on_complete(user)
    registration = self._get_registration()
    if user:
        terminating_auth = Auth(user)
    else:
        terminating_auth = None
    registration.terminate_embargo(terminating_auth)
def test_works(self):
    """box_addon_folder returns a list whose first element is a folder dict."""
    result = utils.box_addon_folder(self.node_settings, Auth(self.user))
    assert_true(isinstance(result, list))
    first = result[0]
    assert_true(isinstance(first, dict))
def test_returns_none_unconfigured(self):
    """Without a linked folder id, box_addon_folder yields None."""
    self.node_settings.folder_id = None
    result = utils.box_addon_folder(self.node_settings, Auth(self.user))
    assert_is(result, None)
def test_terminate_embargo_log_is_nouser(self, node, user, registration):
    """The EMBARGO_TERMINATED log entry carries no attributed user."""
    registration.terminate_embargo(Auth(user))
    newest_log = node.logs.first()
    assert newest_log.action == NodeLog.EMBARGO_TERMINATED
    assert newest_log.user is None
def test_terminate_embargo_adds_log_to_registered_from(
        self, node, registration, user):
    """Terminating an embargo appends an EMBARGO_TERMINATED log on the source node."""
    registration.terminate_embargo(Auth(user))
    newest_log = node.logs.first()
    assert newest_log.action == NodeLog.EMBARGO_TERMINATED
def test_terminate_embargo_makes_registrations_public(
        self, registration, user):
    """Ending the embargo makes every node in the registration tree public."""
    registration.terminate_embargo(Auth(user))
    for descendant in registration.node_and_primary_descendants():
        assert descendant.is_public is True
        assert descendant.is_embargoed is False
def test_must_be_logged_in_decorator_with_no_user(self, mock_from_kwargs):
    """An anonymous Auth causes a redirect to the CAS login URL."""
    mock_from_kwargs.return_value = Auth()
    response = protected()
    assert_true(isinstance(response, BaseResponse))
    expected_location = cas.get_login_url(service_url='http://localhost/')
    assert_equal(expected_location, response.headers.get('location'))
def test_after_delete(self):
    """Deleting the project clears the addon's link to the user settings."""
    self.project.remove_node(Auth(user=self.project.creator))
    # Ensure that changes to node settings have been saved
    self.node_settings.reload()
    assert_true(self.node_settings.user_settings is None)
def test_must_have_addon_user_true(self, mock_current_user):
    """With the github user addon enabled, the decorated view runs normally."""
    mock_current_user.return_value = Auth(self.project.creator)
    self.project.creator.add_addon('github')
    view = must_have_addon('github', 'user')(needs_addon_view)
    assert_equal(view(), 'openaddon')
def set_user_settings(self, settings):
    """Attach a fresh external account to the test user and build an Auth for them."""
    account = self.ExternalAccountFactory()
    account.save()
    self.external_account = account
    self.user.external_accounts.append(account)
    self.user.save()
    self.auth = Auth(self.user)
def _create(cls, target_class, project=None, is_public=False,
            schema=None, data=None,
            archive=False, embargo=None, registration_approval=None,
            retraction=None, *args, **kwargs):
    """Factory hook: register a (possibly freshly created) project.

    Attaches the requested sanction (embargo / registration approval /
    retraction, defaulting to a creator approval) and, unless ``archive`` is
    set, short-circuits the archiver pipeline by marking the archive job and
    sanction as already finished.
    """
    save_kwargs(**kwargs)
    user = None
    if project:
        user = project.creator
    # Explicit user/creator kwargs win over the project creator; fall back
    # to a brand-new user.
    user = kwargs.get('user') or kwargs.get(
        'creator') or user or UserFactory()
    kwargs['creator'] = user
    # Original project to be registered
    project = project or target_class(*args, **kwargs)
    if user._id not in project.permissions:
        project.add_contributor(
            contributor=user,
            permissions=permissions.CREATOR_PERMISSIONS,
            log=False,
            save=False)
    project.save()
    # Default registration parameters
    schema = schema or get_default_metaschema()
    data = data or {'some': 'data'}
    auth = Auth(user=user)
    register = lambda: project.register_node(
        schema=schema, auth=auth, data=data)

    def add_approval_step(reg):
        # Attach whichever sanction was requested; default to a plain
        # registration approval required from the creator.
        if embargo:
            reg.embargo = embargo
        elif registration_approval:
            reg.registration_approval = registration_approval
        elif retraction:
            reg.retraction = retraction
        else:
            reg.require_approval(reg.creator)
        reg.save()
        reg.sanction.add_authorizer(reg.creator, reg)
        reg.sanction.save()

    if archive:
        reg = register()
        add_approval_step(reg)
    else:
        # Skip the real archiver: suppress task enqueueing, then force the
        # archive job and sanction into their finished/approved states.
        with patch('framework.celery_tasks.handlers.enqueue_task'):
            reg = register()
            add_approval_step(reg)
        with patch.object(reg.archive_job, 'archive_tree_finished', Mock(return_value=True)):
            reg.archive_job.status = ARCHIVER_SUCCESS
            reg.archive_job.save()
            reg.sanction.state = Sanction.APPROVED
            reg.sanction.save()
    # NOTE(review): this ArchiveJob is instantiated but never saved —
    # confirm the side effect (if any) is intentional.
    ArchiveJob(
        src_node=project, dst_node=reg, initiator=user,
    )
    if is_public:
        reg.is_public = True
        reg.save()
    return reg
def test_admin_can_set_subjects(self):
    """An admin may replace the preprint's subject list."""
    before = list(self.preprint.subjects.all())
    self.preprint.set_subjects([[SubjectFactory()._id]], auth=Auth(self.user))
    self.preprint.reload()
    after = list(self.preprint.subjects.all())
    assert_not_equal(before, after)
def test_admin_can_publish(self):
    """An admin can flip an unpublished preprint to published."""
    assert_false(self.preprint.is_published)
    admin_auth = Auth(self.user)
    self.preprint.set_published(True, auth=admin_auth, save=True)
    assert_true(self.preprint.is_published)
def setUp(self):
    """Create a test user, an Auth for them, and an unpublished preprint they own."""
    super(TestPreprintIdentifiers, self).setUp()
    user = AuthUserFactory()
    self.user = user
    self.auth = Auth(user=user)
    self.preprint = PreprintFactory(is_published=False, creator=user)
def setUp(self):
    """Create a project and expose its creator plus an Auth for convenience."""
    super(TestUtilsTests, self).setUp()
    project = ProjectFactory()
    self.node = project
    self.user = project.creator
    self.auth = Auth(project.creator)
def test_logged_in(self):
    """Auth.logged_in is True with a user and False without one."""
    signed_in = Auth(user=UserFactory())
    assert_true(signed_in.logged_in)
    anonymous = Auth(user=None)
    assert_false(anonymous.logged_in)
def mock_archive(project, schema=None, auth=None, data=None, parent=None,
                 embargo=False, embargo_end_date=None,
                 retraction=False, justification=None, autoapprove_retraction=False,
                 autocomplete=True, autoapprove=False):
    """ A context manager for registrations. When you want to call Node#register_node in a test but
    do not want to deal with any of this side effects of archiver, this
    helper allows for creating a registration in a safe fashion.

    :param bool embargo: embargo the registration (rather than RegistrationApproval)
    :param bool autocomplete: automatically finish archival?
    :param bool autoapprove: automatically approve registration approval?
    :param bool retraction: retract the registration?
    :param str justification: a justification for the retraction
    :param bool autoapprove_retraction: automatically approve retraction?

    Example use:

    project = ProjectFactory()
    with mock_archive(project) as registration:
        assert_true(registration.is_registration)
        assert_true(registration.archiving)
        assert_true(registration.is_pending_registration)

    with mock_archive(project, autocomplete=True) as registration:
        assert_true(registration.is_registration)
        assert_false(registration.archiving)
        assert_true(registration.is_pending_registration)

    with mock_archive(project, autocomplete=True, autoapprove=True) as registration:
        assert_true(registration.is_registration)
        assert_false(registration.archiving)
        assert_false(registration.is_pending_registration)
    """
    schema = schema or get_default_metaschema()
    auth = auth or Auth(project.creator)
    data = data or ''
    # Register without letting celery actually enqueue archiver tasks.
    with mock.patch('framework.celery_tasks.handlers.enqueue_task'):
        registration = project.register_node(
            schema=schema,
            auth=auth,
            data=data,
            parent=parent,
        )
    if embargo:
        embargo_end_date = embargo_end_date or (
            timezone.now() + dt.timedelta(days=20))
        registration.embargo_registration(
            project.creator, embargo_end_date)
    else:
        registration.require_approval(project.creator)
    if autocomplete:
        # Mark the root archive job finished, then run the archive callback
        # with the archiver's success task patched out.
        root_job = registration.archive_job
        root_job.status = ARCHIVER_SUCCESS
        root_job.sent = False
        root_job.done = True
        root_job.save()
        sanction = registration.sanction
        # NOTE(review): contextlib.nested is Python 2 only.
        with contextlib.nested(
                mock.patch.object(root_job, 'archive_tree_finished', mock.Mock(return_value=True)),
                mock.patch('website.archiver.tasks.archive_success.delay', mock.Mock())):
            archiver_listeners.archive_callback(registration)
    if autoapprove:
        # Force the sanction straight to APPROVED and fire its completion hook.
        sanction = registration.sanction
        sanction.state = Sanction.APPROVED
        sanction.save()
        sanction._on_complete(project.creator)
        sanction.save()
    if retraction:
        justification = justification or 'Because reasons'
        registration.refresh_from_db()
        retraction = registration.retract_registration(
            project.creator, justification=justification)
        if autoapprove_retraction:
            retraction.state = Sanction.APPROVED
            retraction._on_complete(project.creator)
        retraction.save()
    registration.save()
    yield registration
def test_does_not_have_key(self, mock_from_kwargs):
    """A request with a null view-only key redirects instead of granting access."""
    mock_from_kwargs.return_value = Auth(user=None)
    project_url = '/project/{0}'.format(self.project._primary_key)
    res = self.app.get(project_url, {'key': None})
    assert_is_redirect(res)
def test_after_delete(self):
    """Removing the node clears both the user-settings link and the folder id."""
    self.node.remove_node(Auth(user=self.node.creator))
    # Ensure that changes to node settings have been saved
    self.node_settings.reload()
    assert_is_none(self.node_settings.folder_id)
    assert_is_none(self.node_settings.user_settings)
def test_must_be_logged_in_decorator_with_user(self, mock_from_kwargs):
    """A logged-in Auth lets the protected view run without raising."""
    authenticated = Auth(user=UserFactory())
    mock_from_kwargs.return_value = authenticated
    protected()
def after_delete(self, node, user):
    """Node-deletion hook: revoke the addon authorization (with a log) and persist."""
    acting_auth = Auth(user=user)
    self.deauthorize(acting_auth, add_log=True)
    self.save()
def after_delete(self, user=None):
    """Deletion hook: deauthorize this addon, recording a log entry."""
    acting_auth = Auth(user=user)
    self.deauthorize(acting_auth, add_log=True)