def test_archive_success_same_file_in_component(self):
    """A file question answered from a child component should have its
    viewUrl rewritten to the registered child's id after archive_success.
    """
    file_tree = file_tree_factory(3, 3, 3)
    # list(...) makes the indexing work on both Python 2 lists and
    # Python 3 dict views (``.values()[0]`` fails on a view).
    selected = list(select_files_from_tree(file_tree).values())[0]
    # NOTE(review): child_file_tree is built but never passed anywhere —
    # the mock below returns ``file_tree`` for every node. Kept for now;
    # confirm whether it is vestigial before deleting.
    child_file_tree = file_tree_factory(0, 0, 0)
    child_file_tree['children'] = [selected]
    node = factories.NodeFactory(creator=self.user)
    child = factories.NodeFactory(creator=self.user, parent=node)
    # One file question whose extra points at the *source* child node.
    data = {
        ('q_' + selected['name']): {
            'value': fake.word(),
            'extra': [{
                'sha256': selected['extra']['hashes']['sha256'],
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    child._id, selected['path']
                ),
                'selectedFileName': selected['name'],
                'nodeId': child._id
            }]
        }
    }
    schema = generate_schema_from_data(data)
    with test_utils.mock_archive(node, schema=schema, data=data,
                                 autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(BaseStorageAddon, '_get_file_tree',
                               mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory(initiator=registration.creator)
            archive_success(registration._id, job._id)
            registration.reload()
            child_reg = registration.nodes[0]
            # Every answer should now reference the registered child.
            for key, question in registration.registered_meta[schema._id].items():
                assert_in(child_reg._id, question['extra'][0]['viewUrl'])
def setUp(self):
    """Build a user, four nodes, and three collections for search tests."""
    super(TestCollectionsSearch, self).setUp()
    # Start every test from a clean Elasticsearch index.
    search.delete_index(elastic_search.INDEX)
    search.create_index(elastic_search.INDEX)
    self.user = factories.UserFactory(fullname='Salif Keita')
    # One private and three public nodes owned by the same user.
    self.node_private = factories.NodeFactory(
        creator=self.user, title='Salif Keita: Madan', is_public=False)
    self.node_public = factories.NodeFactory(
        creator=self.user, title='Salif Keita: Yamore', is_public=True)
    self.node_one = factories.NodeFactory(
        creator=self.user, title='Salif Keita: Mandjou', is_public=True)
    self.node_two = factories.NodeFactory(
        creator=self.user, title='Salif Keita: Tekere', is_public=True)
    self.provider = factories.CollectionProviderFactory()
    # Two public collections and one private one under the same provider.
    self.collection_one = factories.CollectionFactory(
        title='Life of Salif Keita', creator=self.user, is_public=True,
        provider=self.provider)
    self.collection_public = factories.CollectionFactory(
        title='Best of Salif Keita', creator=self.user, is_public=True,
        provider=self.provider)
    self.collection_private = factories.CollectionFactory(
        title='Commentary: Best of Salif Keita', creator=self.user,
        is_public=False, provider=self.provider)
def test_archive_callback_on_tree_sends_only_one_email(
        self, mock_send_success, mock_arhive_success):
    """A three-level registration tree triggers exactly one success email,
    and only after every node in the tree has finished archiving.
    """
    proj = factories.NodeFactory()
    child = factories.NodeFactory(parent=proj)
    factories.NodeFactory(parent=child)
    reg = factories.RegistrationFactory(project=proj)
    rchild = reg._nodes.first()
    rchild2 = rchild._nodes.first()
    # Register osfstorage as the archive target on every node first...
    for node in [reg, rchild, rchild2]:
        node.archive_job._set_target('osfstorage')
    # ...then mark all of them as in-progress.
    for node in [reg, rchild, rchild2]:
        node.archive_job.update_target('osfstorage', ARCHIVER_INITIATED)
    rchild.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS)
    rchild.save()
    listeners.archive_callback(rchild)
    # Only one node finished: no email yet.
    assert_false(mock_send_success.called)
    reg.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS)
    reg.save()
    listeners.archive_callback(reg)
    # Root finished but the grandchild has not: still no email.
    assert_false(mock_send_success.called)
    rchild2.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS)
    rchild2.save()
    listeners.archive_callback(rchild2)
    # Whole tree finished: exactly one email was sent.
    assert_equal(mock_send_success.call_count, 1)
    assert_true(mock_send_success.called)
def test_archive_callback_updates_archiving_state_when_done(self):
    """When a component's archive target succeeds, the archive callback
    clears that component's ``archiving`` flag.
    """
    project = factories.NodeFactory()
    factories.NodeFactory(parent=project)
    registration = factories.RegistrationFactory(project=project)
    registration.archive_job.update_target('osfstorage', ARCHIVER_INITIATED)
    component = registration.nodes[0]
    component.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS)
    component.save()
    listeners.archive_callback(component)
    assert_false(component.archiving)
def test_archive_success_with_components(self):
    """Every file question across a multi-component registration should be
    rewritten to reference a node inside the registration tree, and every
    selected file should be accounted for exactly once.
    """
    node = factories.NodeFactory(creator=self.user)
    comp1 = factories.NodeFactory(parent=node, creator=self.user)
    factories.NodeFactory(parent=comp1, creator=self.user)
    factories.NodeFactory(parent=node, creator=self.user)
    nodes = [n for n in node.node_and_primary_descendants()]
    file_trees, selected_files, node_index = generate_file_tree(nodes)
    data = generate_metadata(file_trees, selected_files, node_index)
    schema = generate_schema_from_data(data)
    with test_utils.mock_archive(node, schema=schema, data=copy.deepcopy(data),
                                 autocomplete=True, autoapprove=True) as registration:
        def mock_get_file_tree(self, *args, **kwargs):
            # Serve each registered node the tree of its source node.
            return file_trees[self.owner.registered_from._id]
        with mock.patch.object(BaseStorageAddon, '_get_file_tree', mock_get_file_tree):
            job = factories.ArchiveJobFactory(initiator=registration.creator)
            archive_success(registration._id, job._id)
            registration.reload()
            for key, question in registration.registered_meta[schema._id].items():
                target = None
                if isinstance(question['value'], dict):
                    # Nested question: collect sub-answers carrying file metadata.
                    target = [
                        v for v in question['value'].values()
                        if 'extra' in v and 'sha256' in v['extra'][0]
                    ]
                elif 'extra' in question and 'sha256' in question['extra'][0]:
                    # BUGFIX: previously tested ``'sha256' in question['extra']``
                    # (membership in a *list* of dicts — never true), and bound
                    # ``target = question`` (a dict), which would crash at
                    # ``target[0]`` below. Now matches the sibling tests and
                    # keeps target uniformly a list.
                    target = [question]
                if target:
                    node_id = re.search(
                        r'^/project/(?P<node_id>\w{5}).+$',
                        target[0]['extra'][0]['viewUrl']
                    ).groupdict()['node_id']
                    # The rewritten URL must point into the registration tree.
                    assert_in(node_id, [
                        r._id
                        for r in registration.node_and_primary_descendants()
                    ])
                    if target[0]['extra'][0]['sha256'] in selected_files:
                        del selected_files[target[0]['extra'][0]['sha256']]
                else:
                    # check non-file questions are unmodified
                    assert_equal(data[key]['value'], question['value'])
    # ensure each selected file was checked
    assert_false(selected_files)
def test_archive_tree_finished_d3(self):
    """archive_tree_finished() is True on every node of a depth-3 tree once
    all osfstorage targets have succeeded.
    """
    project = factories.NodeFactory()
    middle = factories.NodeFactory(parent=project)
    factories.NodeFactory(parent=middle)
    registration = factories.RegistrationFactory(project=project)
    reg_child = registration._nodes.first()
    reg_grandchild = reg_child._nodes.first()
    tree = [registration, reg_child, reg_grandchild]
    # Set up the archive target on every node before marking any successful.
    for reg_node in tree:
        reg_node.archive_job._set_target('osfstorage')
    for reg_node in tree:
        reg_node.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS)
    for reg_node in tree:
        assert_true(reg_node.archive_job.archive_tree_finished())
def test_get_file_map_memoization(self):
    """get_file_map() should fetch the file tree once and reuse the result
    on a subsequent call for the same node.
    """
    root = factories.NodeFactory()
    component = factories.NodeFactory(parent=root)
    factories.NodeFactory(parent=component)
    factories.NodeFactory(parent=root)
    with mock.patch.object(BaseStorageAddon, '_get_file_tree') as mock_get_file_tree:
        mock_get_file_tree.return_value = file_tree_factory(3, 3, 3)
        # First call populates the cache.
        archiver_utils.get_file_map(root)
        calls_after_first = mock_get_file_tree.call_count
        # Second call must not hit the addon again.
        archiver_utils.get_file_map(root)
        assert_equal(mock_get_file_tree.call_count, calls_after_first)
def test_archive_success_with_deeply_nested_schema(self):
    """File references in a nested schema should be rewritten to point at
    the registration's copies of the selected files.
    """
    node = factories.NodeFactory(creator=self.user)
    file_trees, selected_files, node_index = generate_file_tree([node])
    # One file question per selected file, each extra carrying its sha256
    # and a viewUrl that initially references the *source* node.
    data = {
        ('q_' + selected_file['name']): {
            'value': fake.word(),
            'extra': [{
                'selectedFileName': selected_file['name'],
                'nodeId': node._id,
                'sha256': sha256,
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(node._id, selected_file['path'])
            }]
        }
        for sha256, selected_file in selected_files.items()
    }
    schema = generate_schema_from_data(data)
    with test_utils.mock_archive(node, schema=schema, data=data,
                                 autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(BaseStorageAddon, '_get_file_tree',
                               mock.Mock(return_value=file_trees[node._id])):
            job = factories.ArchiveJobFactory(initiator=registration.creator)
            archive_success(registration._id, job._id)
            registration.reload()
            for key, question in registration.registered_meta[schema._id].items():
                target = None
                if isinstance(question['value'], dict):
                    # Nested question: take the first sub-answer with file metadata.
                    target = [v for v in question['value'].values()
                              if 'extra' in v and 'sha256' in v['extra'][0]][0]
                elif 'extra' in question and 'sha256' in question['extra'][0]:
                    target = question
                if target:
                    # viewUrl must now reference the registration, not the source node.
                    assert_in(registration._id, target['extra'][0]['viewUrl'])
                    assert_not_in(node._id, target['extra'][0]['viewUrl'])
                    del selected_files[target['extra'][0]['sha256']]
                else:
                    # check non-file questions are unmodified
                    assert_equal(data[key]['value'], question['value'])
    # Every selected file must have been seen exactly once.
    assert_false(selected_files)
def setUp(self):
    # run_celery_tasks() executes queued tasks inline so the fixtures
    # below are fully processed before the test body runs.
    with run_celery_tasks():
        super(TestPublicNodes, self).setUp()
        self.user = factories.UserFactory(fullname='Doug Bogie')
        self.title = 'Red Special'
        self.consolidate_auth = Auth(user=self.user)
        self.project = factories.ProjectFactory(
            title=self.title,
            creator=self.user,
            is_public=True,
        )
        self.component = factories.NodeFactory(
            parent=self.project,
            title=self.title,
            creator=self.user,
            is_public=True
        )
        self.registration = factories.RegistrationFactory(
            title=self.title,
            creator=self.user,
            is_public=True,
        )
        # Mark the archive job finished so the registration behaves like a
        # fully archived public registration.
        self.registration.archive_job.target_addons = []
        self.registration.archive_job.status = 'SUCCESS'
        self.registration.archive_job.save()
def test_archive_success(self):
    """File questions should be rewritten to reference the registration
    after a successful archive; non-file questions stay untouched.
    """
    node = factories.NodeFactory(creator=self.user)
    file_trees, selected_files, node_index = generate_file_tree([node])
    data = generate_metadata(
        file_trees,
        selected_files,
        node_index
    )
    schema = generate_schema_from_data(data)
    with test_utils.mock_archive(node, schema=schema, data=data,
                                 autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(BaseStorageAddon, '_get_file_tree',
                               mock.Mock(return_value=file_trees[node._id])):
            job = factories.ArchiveJobFactory(initiator=registration.creator)
            archive_success(registration._id, job._id)
            registration.reload()
            for key, question in registration.registered_meta[schema._id].items():
                target = None
                if isinstance(question.get('value'), dict):
                    # Nested question: take the first sub-answer with file metadata.
                    target = [v for v in question['value'].values()
                              if 'extra' in v and 'sha256' in v['extra'][0]][0]
                elif 'extra' in question and 'hashes' in question['extra'][0]:
                    # NOTE(review): sibling tests check ``'sha256' in
                    # question['extra'][0]`` here; if generate_metadata's
                    # extras carry 'sha256' but not 'hashes', this branch
                    # never fires and flat questions are silently skipped.
                    # Confirm the intended key.
                    target = question
                if target:
                    # viewUrl must now reference the registration, not the node.
                    assert_in(registration._id, target['extra'][0]['viewUrl'])
                    assert_not_in(node._id, target['extra'][0]['viewUrl'])
                    del selected_files[target['extra'][0]['sha256']]
                else:
                    # check non-file questions are unmodified
                    assert_equal(data[key]['value'], question['value'])
    # Every selected file must have been accounted for.
    assert_false(selected_files)
def test_field_with_non_attribute(self):
    """A relationship kwarg that is not an attribute on the target should be
    rendered literally into the related link.
    """
    request = make_drf_request_with_version(version='2.0')
    parent = factories.ProjectFactory()
    child = factories.NodeFactory(parent=parent)
    serialized = self.BasicNodeSerializer(
        child, context={'request': request}).data['data']
    links = serialized['relationships']['not_attribute_on_target']['links']
    assert_in('/v2/nodes/{}/children/'.format('12345'), links['related']['href'])
def test_field_with_two_kwargs(self):
    """Both URL kwargs of a relationship resolve against the target node."""
    request = make_drf_request_with_version(version='2.0')
    parent = factories.ProjectFactory()
    child = factories.NodeFactory(parent=parent)
    serialized = self.BasicNodeSerializer(
        child, context={'request': request}).data['data']
    links = serialized['relationships']['two_url_kwargs']['links']
    assert_in(
        '/v2/nodes/{}/node_links/{}/'.format(child._id, child._id),
        links['related']['href'])
def test_archive_success_different_name_same_sha(self):
    """When two archived files share a sha256, the answered question should
    keep referencing the file that was actually selected by name.
    """
    file_tree = file_tree_factory(0, 0, 0)
    selected = file_factory()
    # Same digest, different (factory-generated) name.
    duplicate = file_factory(sha256=selected['extra']['hashes']['sha256'])
    file_tree['children'] = [selected, duplicate]
    node = factories.NodeFactory(creator=self.user)
    data = {
        ('q_' + selected['name']): {
            'value': fake.word(),
            'extra': [{
                'sha256': selected['extra']['hashes']['sha256'],
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    node._id, selected['path']
                ),
                'selectedFileName': selected['name'],
                'nodeId': node._id
            }]
        }
    }
    schema = generate_schema_from_data(data)
    with test_utils.mock_archive(node, schema=schema, data=data,
                                 autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(BaseStorageAddon, '_get_file_tree',
                               mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory(initiator=registration.creator)
            archive_success(registration._id, job._id)
            for key, question in registration.registered_meta[schema._id].items():
                assert_equal(
                    question['extra'][0]['selectedFileName'],
                    selected['name'])
def test_deprecation_warning_for_snake_case(self):
    """POSTing a snake_case type at/after the kebab-case API version should
    succeed, return a kebab-case type, and include a deprecation warning.
    """
    user = factories.AuthUserFactory()
    node = factories.NodeFactory(creator=user)
    url = '/{}nodes/{}/draft_registrations/?version={}'.format(
        API_BASE, node._id, KEBAB_CASE_VERSION)
    schema = RegistrationSchema.objects.get(
        name='OSF-Standard Pre-Data Collection Registration',
        schema_version=2)
    relationships = {
        'registration_schema': {
            'data': {
                'id': schema._id,
                'type': 'registration_schemas'
            }
        }
    }
    payload = {
        'data': {
            'type': 'draft_registrations',
            'relationships': relationships
        }
    }
    res = self.app.post_json_api(url, payload, auth=user.auth)
    assert res.json['data']['type'] == 'draft-registrations'
    assert res.json['meta'][
        'warning'] == 'As of API Version {0}, all types are now Kebab-case. {0} will accept snake_case, but this will be deprecated in future versions.'.format(
        KEBAB_CASE_VERSION)
def test_archive_failure_different_name_same_sha(self):
    """If the metadata references a file name missing from the archived tree
    (only a same-sha file under another name exists), archive_success must
    raise ArchivedFileNotFound.
    """
    file_tree = file_tree_factory(0, 0, 0)
    fake_file = file_factory()
    # Same sha256 but a different generated name; only this copy is archived.
    fake_file2 = file_factory(sha256=fake_file['extra']['hashes']['sha256'])
    file_tree['children'] = [fake_file2]
    node = factories.NodeFactory(creator=self.user)
    # The question references fake_file (by name), which is NOT in the tree.
    data = {
        ('q_' + fake_file['name']): {
            'value': fake.word(),
            'extra': [{
                'sha256': fake_file['extra']['hashes']['sha256'],
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    node._id, fake_file['path']
                ),
                'selectedFileName': fake_file['name'],
                'nodeId': node._id
            }]
        }
    }
    schema = generate_schema_from_data(data)
    draft = factories.DraftRegistrationFactory(branched_from=node,
                                               registration_schema=schema,
                                               registered_metadata=data)
    with test_utils.mock_archive(node, schema=schema, data=data,
                                 autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(BaseStorageAddon, '_get_file_tree',
                               mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory(initiator=registration.creator)
            # Link the draft to the registration before running the task.
            draft.registered_node = registration
            draft.save()
            with assert_raises(ArchivedFileNotFound):
                archive_success(registration._id, job._id)
def test_get_or_http_error_by_query_found(self):
    """A compound query matching an existing node returns that node."""
    node = factories.NodeFactory()
    query = Q('title', 'eq', node.title) & Q('_id', 'eq', node._id)
    result = get_or_http_error(Node, query)
    assert result == node
def populate_collection_providers(add_data):
    """Create or update each provider in COLLECTION_PROVIDERS and, when
    ``add_data`` is truthy, seed its primary collection with five randomly
    classified public nodes.
    """
    for data in COLLECTION_PROVIDERS:
        # Pop the keys needing special handling; everything left in ``data``
        # goes straight into update_or_create as field defaults. Note this
        # mutates the COLLECTION_PROVIDERS entries in place.
        _id = data.pop('_id')
        default_license = data.pop('default_license', False)
        licenses = [
            get_license(name) for name in data.pop('licenses_acceptable', [])
        ]
        custom_taxonomy = data.pop('custom_taxonomy', False)
        primary_collection = data.pop('primary_collection', False)
        provider, created = CollectionProvider.objects.update_or_create(
            _id=_id, defaults=data)
        if licenses:
            provider.licenses_acceptable.set(licenses)
        if default_license:
            provider.default_license = get_license(default_license)
        # Only load the custom taxonomy once; skip if subjects already exist.
        if custom_taxonomy and not provider.subjects.exists():
            logger.info('Adding custom taxonomy for: {}'.format(_id))
            call_command('populate_custom_taxonomies', '--provider', _id,
                         '--type', 'osf.collectionprovider', '--data',
                         json.dumps(custom_taxonomy))
        # Fall back to the central 'osf' taxonomy when the provider has none.
        provider_subjects = provider.subjects.all()
        subjects = provider_subjects if len(
            provider_subjects) else PreprintProvider.load(
            'osf').subjects.all()
        if primary_collection and not provider.primary_collection:
            primary_collection['provider'] = provider
            provider.primary_collection = factories.CollectionFactory(
                **primary_collection)
            provider.primary_collection.save()
        provider.save()
        if add_data and provider.primary_collection:
            user = factories.AuthUserFactory()
            user.save()
            # Collect five public nodes with random status/type/subjects.
            for _ in range(5):
                node = factories.NodeFactory()
                node.is_public = True
                node.save()
                status = random.choice(
                    provider.primary_collection.status_choices)
                collected_type = random.choice(
                    provider.primary_collection.collected_type_choices)
                cgm = provider.primary_collection.collect_object(
                    node, user, collected_type=collected_type, status=status)
                rando_subjects = random.sample(
                    subjects, min(len(subjects), 5))
                cgm.subjects.add(*rando_subjects)
                cgm.save()
        logger.info('{} collection provider: {}'.format(
            'Added' if created else 'Updated', _id))
def registration_with_contribs(self, user, user2):
    """Return an embargoed registration of a project with two contributors."""
    project = factories.NodeFactory(creator=user)
    project.add_contributor(user2, permissions.ADMIN)
    embargo = factories.EmbargoFactory()
    # Embargo runs four days into the future.
    embargo.end_date = timezone.now() + datetime.timedelta(days=4)
    return factories.RegistrationFactory(
        project=project, creator=user, embargo=embargo)
def test_field_with_two_filters(self):
    """Both filters declared on the field appear, URL-encoded, in the
    related link.
    """
    request = make_drf_request_with_version(version='2.0')
    parent = factories.ProjectFactory()
    child = factories.NodeFactory(parent=parent)
    serialized = self.BasicNodeSerializer(
        child, context={'request': request}).data['data']
    href = serialized['relationships']['field_with_filters']['links']['related']['href']
    assert_in(urllib.quote('filter[target]=hello', safe='?='), href)
    assert_in(urllib.quote('filter[woop]=yea', safe='?='), href)
def test_self_and_related_fields(self):
    """A field declaring both views yields both self and related links."""
    request = make_drf_request_with_version(version='2.0')
    parent = factories.ProjectFactory()
    child = factories.NodeFactory(parent=parent)
    serialized = self.BasicNodeSerializer(
        child, context={'request': request}).data['data']
    links = serialized['relationships']['self_and_related_field']['links']
    assert_in('/v2/nodes/{}/contributors/'.format(child._id), links['self']['href'])
    assert_in('/v2/nodes/{}/'.format(child._id), links['related']['href'])
def test_serializing_empty_to_one(self):
    """Empty to-one relationships are omitted before 2.9 and null from 2.9 on."""
    request = make_drf_request_with_version(version='2.2')
    node = factories.NodeFactory()
    serialized = self.BasicNodeSerializer(
        node, context={'request': request}).data['data']
    # This node is not registered_from another node hence it is an empty-to-one.
    assert 'registered_from' not in serialized['relationships']

    # In 2.9, API returns null for empty relationships
    # https://openscience.atlassian.net/browse/PLAT-840
    request = make_drf_request_with_version(version='2.9')
    node = factories.NodeFactory()
    serialized = self.BasicNodeSerializer(
        node, context={'request': request}).data['data']
    assert serialized['relationships']['registered_from']['data'] is None
def test_serializing_meta(self):
    """Meta on the relationship link carries 'extra' but no 'count'."""
    request = make_drf_request_with_version(version='2.0')
    parent = factories.ProjectFactory()
    child = factories.NodeFactory(parent=parent)
    serialized = self.BasicNodeSerializer(
        child, context={'request': request}).data['data']
    meta = serialized['relationships']['parent_with_meta']['links']['related']['meta']
    assert_not_in('count', meta)
    assert_in('extra', meta)
    assert_equal(meta['extra'], 'foo')
def test_autoload(self):
    """autoload() resolves the node_id kwarg to the Node and passes it in."""
    expected = factories.NodeFactory()

    def echo(node, *args, **kwargs):
        # The wrapped function should receive the loaded node directly.
        return node

    wrapped = autoload(Node, 'node_id', 'node', echo)
    assert wrapped(node_id=expected._id) == expected
def setUp(self):
    """Fixtures comparing naive (pre-django-osf) and tz-aware datetimes."""
    super(TestDateByVersion, self).setUp()
    self.node = factories.NodeFactory()
    self.old_date = datetime.utcnow()  # naive dates before django-osf
    self.old_date_without_microseconds = self.old_date.replace(microsecond=0)
    self.new_date = datetime.utcnow().replace(tzinfo=utc)  # non-naive after django-osf
    self.new_date_without_microseconds = self.new_date.replace(microsecond=0)
    # strftime formats for the old and new API date renderings.
    self.old_format = '%Y-%m-%dT%H:%M:%S.%f'
    self.old_format_without_microseconds = '%Y-%m-%dT%H:%M:%S'
    self.new_format = '%Y-%m-%dT%H:%M:%S.%fZ'
def setUp(self):
    super(ArchiverTestCase, self).setUp()
    # Simulate the celery request context the archiver tasks expect.
    handlers.celery_before_request()
    self.user = factories.UserFactory()
    self.auth = Auth(user=self.user)
    self.src = factories.NodeFactory(creator=self.user)
    # Registration of src; signals suppressed so archiving is driven manually.
    self.dst = factories.RegistrationFactory(user=self.user, project=self.src, send_signals=False)
    archiver_utils.before_archive(self.dst, self.user)
    self.archive_job = self.dst.archive_job
def test_field_with_callable_related_attrs(self):
    """registered_from is absent on a plain node and, on a registration,
    links back to the node it was registered from.
    """
    request = make_drf_request_with_version(version='2.0')
    parent = factories.ProjectFactory()
    child = factories.NodeFactory(parent=parent)
    serialized = self.BasicNodeSerializer(
        child, context={'request': request}).data['data']
    assert_not_in('registered_from', serialized['relationships'])

    registration = factories.RegistrationFactory(project=child)
    serialized = self.BasicNodeSerializer(
        registration, context={'request': request}).data['data']
    links = serialized['relationships']['registered_from']['links']
    assert_in('/v2/nodes/{}/'.format(child._id), links['related']['href'])
def test_get_file_map_with_components(self):
    """Every file in the mocked tree appears in the node's file map,
    keyed by sha256 and mapped to the matching file entry.
    """
    root = factories.NodeFactory()
    component = factories.NodeFactory(parent=root)
    factories.NodeFactory(parent=component)
    factories.NodeFactory(parent=root)
    file_tree = file_tree_factory(3, 3, 3)
    with mock.patch.object(BaseStorageAddon, '_get_file_tree',
                           mock.Mock(return_value=file_tree)):
        file_map = archiver_utils.get_file_map(root)
        queue = [file_tree]
        # Index the map by sha256, dropping the node-id component.
        file_map = {sha256: value for sha256, value, _ in file_map}
        # Breadth-first walk of the tree, checking each file against the map.
        while queue:
            entry = queue.pop(0)
            if entry['kind'] == 'file':
                digest = entry['extra']['hashes']['sha256']
                assert_in(digest, file_map)
                assert_equal(entry, file_map[digest])
            else:
                queue.extend(entry['children'])
def setUp(self):
    super(TestFileSerializer, self).setUp()
    self.user = factories.UserFactory()
    self.node = factories.NodeFactory(creator=self.user)
    self.file = utils.create_test_file(self.node, self.user)
    # First version's creation date is the file's created date; the last
    # version's creation date is its modified date.
    self.date_created = self.file.versions.first().date_created
    self.date_modified = self.file.versions.last().date_created
    self.date_created_tz_aware = self.date_created.replace(tzinfo=utc)
    self.date_modified_tz_aware = self.date_modified.replace(tzinfo=utc)
    # Expected API datetime rendering.
    self.new_format = '%Y-%m-%dT%H:%M:%S.%fZ'
def setUp(self):
    super(TestApiBaseSerializers, self).setUp()
    self.user = factories.AuthUserFactory()
    self.auth = factories.Auth(self.user)
    self.node = factories.ProjectFactory(is_public=True)
    # Five public children so relationship counts are non-trivial.
    for i in range(5):
        factories.ProjectFactory(is_public=True, parent=self.node)
    # A pointer (node link) from self.node to another public node.
    self.linked_node = factories.NodeFactory(creator=self.user, is_public=True)
    self.node.add_pointer(self.linked_node, auth=self.auth)
    self.url = '/{}nodes/{}/'.format(API_BASE, self.node._id)
def test_archive_tree_finished_with_nodes(self):
    """archive_tree_finished() stays False until every node in the tree has
    succeeded on all of its archive targets.
    """
    project = factories.NodeFactory()
    factories.NodeFactory(parent=project)
    sibling = factories.NodeFactory(parent=project)
    factories.NodeFactory(parent=sibling)
    registration = factories.RegistrationFactory(project=project)
    first_child = registration._nodes.first()
    # Nothing has succeeded yet: no node reports finished.
    for reg_node in registration.node_and_primary_descendants():
        assert_false(reg_node.archive_job.archive_tree_finished())
    # Completing only one child is not enough for the tree to be finished.
    for target in first_child.archive_job.target_addons.all():
        first_child.archive_job.update_target(target.name, ARCHIVER_SUCCESS)
    first_child.archive_job.save()
    assert_false(registration.archive_job.archive_tree_finished())
    # Complete every target on every node...
    for reg_node in registration.node_and_primary_descendants():
        for target in reg_node.archive_job.target_addons.all():
            reg_node.archive_job.update_target(target.name, ARCHIVER_SUCCESS)
    # ...and the whole tree reports finished from every node's perspective.
    for reg_node in registration.node_and_primary_descendants():
        assert_true(reg_node.archive_job.archive_tree_finished())