def test_send_with_sendgrid_success(self):
    """_send_with_sendgrid should build a configured HTML Mail, hand it to
    the client exactly once, and return truthy on success."""
    client = mock.MagicMock()
    client.send.return_value = 200, 'success'
    sender, recipient = fake_email(), fake_email()
    cat_a, cat_b = fake.word(), fake.word()
    mail_subject = fake.bs()
    body = fake.text()
    sent_ok = _send_with_sendgrid(
        from_addr=sender,
        to_addr=recipient,
        subject=mail_subject,
        message=body,
        mimetype='html',
        client=client,
        categories=(cat_a, cat_b),
    )
    assert_true(sent_ok)
    assert_equal(client.send.call_count, 1)
    # Inspect the Mail object that was handed to the client.
    sent_mail = client.send.call_args[0][0]
    assert_is_instance(sent_mail, sendgrid.Mail)
    assert_equal(sent_mail.from_email, sender)
    assert_equal(sent_mail.to[0], recipient)
    assert_equal(sent_mail.subject, mail_subject)
    assert_in(body, sent_mail.html)
    # Categories are forwarded via the SMTP API payload.
    assert_equal(sent_mail.smtpapi.data['category'], (cat_a, cat_b))
def test_send_with_sendgrid_success(self):
    """_send_with_sendgrid should build a configured plain-text Mail, hand
    it to the client exactly once, and return truthy on success."""
    client = mock.MagicMock()
    client.send.return_value = 200, 'success'
    sender = fake.email()
    recipient = fake.email()
    cat_a, cat_b = fake.word(), fake.word()
    mail_subject = fake.bs()
    body = fake.text()
    sent_ok = _send_with_sendgrid(
        from_addr=sender,
        to_addr=recipient,
        subject=mail_subject,
        message=body,
        mimetype='txt',
        client=client,
        categories=(cat_a, cat_b),
    )
    assert_true(sent_ok)
    assert_equal(client.send.call_count, 1)
    # Inspect the Mail object that was handed to the client.
    sent_mail = client.send.call_args[0][0]
    assert_is_instance(sent_mail, sendgrid.Mail)
    assert_equal(sent_mail.from_email, sender)
    assert_equal(sent_mail.to[0], recipient)
    assert_equal(sent_mail.subject, mail_subject)
    assert_equal(sent_mail.text, body)
    # Categories are forwarded via the SMTP API payload.
    assert_equal(sent_mail.smtpapi.data['category'], (cat_a, cat_b))
def test_do_not_create_public_wiki_page(
        self, app, user_creator, user_read_contributor,
        user_non_contributor, url_node_public, wiki_public):
    """Creating a wiki page on a public node is rejected for duplicate
    names (409) and for callers without write access (403/401)."""
    # 'home' always exists on a node, so re-creating it is a conflict.
    response = app.post_json_api(url_node_public, create_wiki_payload('home'), auth=user_creator.auth, expect_errors=True)
    assert response.status_code == 409
    assert response.json['errors'][0]['detail'] == "A wiki page with the name 'home' already exists."

    # An already-existing page name is likewise a conflict.
    response = app.post_json_api(url_node_public, create_wiki_payload(wiki_public.page_name), auth=user_creator.auth, expect_errors=True)
    assert response.status_code == 409
    assert response.json['errors'][0]['detail'] == "A wiki page with the name '{}' already exists.".format(wiki_public.page_name)

    # A read-only contributor is forbidden.
    response = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), auth=user_read_contributor.auth, expect_errors=True)
    assert response.status_code == 403

    # A non-contributor is forbidden.
    response = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), auth=user_non_contributor.auth, expect_errors=True)
    assert response.status_code == 403

    # An unauthenticated request is rejected outright.
    response = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), expect_errors=True)
    assert response.status_code == 401
def test_do_not_create_registration_wiki_page(
        self, app, user_creator, url_registration_public,
        url_registration_private):
    """Wiki pages cannot be created on registrations, whether the
    registration is public or embargoed (both return 405)."""
    for url in (url_registration_public, url_registration_private):
        response = app.post_json_api(
            url, create_wiki_payload(fake.word()),
            auth=user_creator.auth, expect_errors=True)
        assert response.status_code == 405
def test_do_not_create_public_wiki_page(self, app, user_creator, user_read_contributor, user_non_contributor, url_node_public, wiki_public, project_public):
    """Creating a wiki page on a public node is rejected for duplicate
    names (409) and for callers without write access, including read-level
    OSF group members (403/401)."""
    # 'home' always exists on a node, so re-creating it is a conflict.
    response = app.post_json_api(url_node_public, create_wiki_payload('home'), auth=user_creator.auth, expect_errors=True)
    assert response.status_code == 409
    assert response.json['errors'][0]['detail'] == "A wiki page with the name 'home' already exists."

    # An already-existing page name is likewise a conflict.
    response = app.post_json_api(url_node_public, create_wiki_payload(wiki_public.page_name), auth=user_creator.auth, expect_errors=True)
    assert response.status_code == 409
    assert response.json['errors'][0]['detail'] == "A wiki page with the name '{}' already exists.".format(wiki_public.page_name)

    # A read-only contributor is forbidden.
    response = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), auth=user_read_contributor.auth, expect_errors=True)
    assert response.status_code == 403

    # A non-contributor is forbidden.
    response = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), auth=user_non_contributor.auth, expect_errors=True)
    assert response.status_code == 403

    # A member of an OSF group with only read permission is forbidden.
    member = AuthUserFactory()
    project_public.add_osf_group(OSFGroupFactory(creator=member), READ)
    response = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), auth=member.auth, expect_errors=True)
    assert response.status_code == 403

    # An unauthenticated request is rejected outright.
    response = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), expect_errors=True)
    assert response.status_code == 401
def generate_metadata(file_trees, selected_files, node_index):
    """Build fake registration metadata for every selected file.

    For each selected file, adds one plain file question and one nested
    (object-valued) question referencing it, plus five unrelated
    non-file questions.
    """
    def _file_extra(digest, info):
        # Single-element 'extra' list in the shape the archiver expects.
        return [{
            'sha256': digest,
            'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                node_index[digest], info['path']),
            'selectedFileName': info['name'],
            'nodeId': node_index[digest],
        }]

    data = {}
    # Plain uploader-style questions, one per selected file.
    for digest, info in selected_files.items():
        data['q_' + info['name']] = {
            'value': fake.word(),
            'extra': _file_extra(digest, info),
        }
    # Object-valued questions: one file sub-answer and one plain sub-answer.
    for digest, info in selected_files.items():
        data['q_' + info['name'] + '_obj'] = {
            'value': {
                name_factory(): {
                    'value': fake.word(),
                    'extra': _file_extra(digest, info),
                },
                name_factory(): {
                    'value': fake.word(),
                },
            },
        }
    # A handful of questions that reference no files at all.
    for i in range(5):
        data['q{}'.format(i)] = {'value': fake.word()}
    return data
def generate_metadata(file_trees, selected_files, node_index):
    """Build fake registration metadata for every selected file.

    Produces a flat file question and a nested object question per
    selected file, plus five unrelated non-file questions.
    """
    def _extra_for(digest, info):
        # Archiver-shaped 'extra' entry pointing at the file on its node.
        return [{
            'sha256': digest,
            'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                node_index[digest], info['path']),
            'selectedFileName': info['name'],
            'nodeId': node_index[digest],
        }]

    uploader_questions = {
        'q_' + info['name']: {
            'value': fake.word(),
            'extra': _extra_for(digest, info),
        }
        for digest, info in selected_files.items()
    }
    object_questions = {
        'q_' + info['name'] + '_obj': {
            'value': {
                name_factory(): {
                    'value': fake.word(),
                    'extra': _extra_for(digest, info),
                },
                name_factory(): {'value': fake.word()},
            },
        }
        for digest, info in selected_files.items()
    }
    plain_questions = {
        'q{}'.format(i): {'value': fake.word()}
        for i in range(5)
    }

    data = {}
    data.update(uploader_questions)
    data.update(object_questions)
    data.update(plain_questions)
    return data
def test_do_not_create_private_wiki_page(
        self, app, wiki_private, url_node_private,
        user_read_contributor, user_non_contributor):
    """Creating a wiki page on a private node requires write access."""
    # A read-only contributor is forbidden.
    response = app.post_json_api(
        url_node_private, create_wiki_payload(fake.word()),
        auth=user_read_contributor.auth, expect_errors=True)
    assert response.status_code == 403

    # A non-contributor is forbidden.
    response = app.post_json_api(
        url_node_private, create_wiki_payload(fake.word()),
        auth=user_non_contributor.auth, expect_errors=True)
    assert response.status_code == 403

    # An anonymous request is rejected outright.
    response = app.post_json_api(
        url_node_private, create_wiki_payload(fake.word()),
        expect_errors=True)
    assert response.status_code == 401
def test_archive_failure_different_name_same_sha(self):
    # The registration metadata references a file by name, but the archived
    # file tree only contains a file with the same sha256 under a different
    # name — archive_success must raise ArchivedFileNotFound.
    file_tree = file_tree_factory(0, 0, 0)
    fake_file = file_factory()
    # Same sha256 as fake_file but a different (random) name.
    fake_file2 = file_factory(sha256=fake_file['extra']['hashes']['sha256'])
    file_tree['children'] = [fake_file2]
    node = factories.NodeFactory(creator=self.user)
    # Metadata points at fake_file (the name that is absent from the tree).
    data = {
        ('q_' + fake_file['name']): {
            'value': fake.word(),
            'extra': [{
                'sha256': fake_file['extra']['hashes']['sha256'],
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    node._id, fake_file['path']
                ),
                'selectedFileName': fake_file['name'],
                'nodeId': node._id
            }]
        }
    }
    schema = generate_schema_from_data(data)
    draft = factories.DraftRegistrationFactory(branched_from=node, registration_schema=schema, registered_metadata=data)
    with test_utils.mock_archive(node, schema=schema, data=data, autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(BaseStorageAddon, '_get_file_tree', mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory(initiator=registration.creator)
            # Link the draft to the registration before running the archive
            # success handler, as the handler resolves files via the draft.
            draft.registered_node = registration
            draft.save()
            with assert_raises(ArchivedFileNotFound):
                archive_success(registration._id, job._id)
def test_create_public_wiki_page_with_empty_content(self, app, user_write_contributor, url_node_public, project_public):
    """An empty content string is rejected with a 400 validation error."""
    payload = create_wiki_payload(fake.word())
    payload['data']['attributes']['content'] = ''
    response = app.post_json_api(
        url_node_public, payload,
        auth=user_write_contributor.auth, expect_errors=True)
    assert response.status_code == 400
    assert response.json['errors'][0]['detail'] == 'This field may not be blank.'
def test_archive_success_different_name_same_sha(self):
    # Two files share a sha256 but have different names; after archiving,
    # the question must still reference the file that was actually
    # selected (matched by name, not just by hash).
    file_tree = file_tree_factory(0, 0, 0)
    fake_file = file_factory()
    # Same sha256 as fake_file but a different (random) name.
    fake_file2 = file_factory(sha256=fake_file['extra']['hashes']['sha256'])
    file_tree['children'] = [fake_file, fake_file2]
    node = factories.NodeFactory(creator=self.user)
    data = {
        ('q_' + fake_file['name']): {
            'value': fake.word(),
            'extra': [{
                'sha256': fake_file['extra']['hashes']['sha256'],
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    node._id, fake_file['path']
                ),
                'selectedFileName': fake_file['name'],
                'nodeId': node._id
            }]
        }
    }
    schema = generate_schema_from_data(data)
    with test_utils.mock_archive(node, schema=schema, data=data, autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(BaseStorageAddon, '_get_file_tree', mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory(initiator=registration.creator)
            archive_success(registration._id, job._id)
            for key, question in registration.registered_meta[schema._id].items():
                # The selected file's name must be preserved.
                assert_equal(question['extra'][0]['selectedFileName'], fake_file['name'])
def test_archive_success_same_file_in_component(self):
    # A selected file lives on a child component rather than the top-level
    # node; after archiving, the question's viewUrl must point at the
    # child's registration.
    file_tree = file_tree_factory(3, 3, 3)
    selected = select_files_from_tree(file_tree).values()[0]
    child_file_tree = file_tree_factory(0, 0, 0)
    child_file_tree['children'] = [selected]
    node = factories.NodeFactory(creator=self.user)
    child = factories.NodeFactory(creator=self.user, parent=node)
    # Metadata references the file on the child node.
    data = {
        ('q_' + selected['name']): {
            'value': fake.word(),
            'extra': [{
                'sha256': selected['extra']['hashes']['sha256'],
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    child._id, selected['path']
                ),
                'selectedFileName': selected['name'],
                'nodeId': child._id
            }]
        }
    }
    schema = generate_schema_from_data(data)
    with test_utils.mock_archive(node, schema=schema, data=data, autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(StorageAddonBase, '_get_file_tree', mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory()
            archive_success(registration._id, job._id)
            # First (only) child registration of the top-level registration.
            child_reg = registration.nodes[0]
            for key, question in registration.registered_meta[schema._id].items():
                assert_in(child_reg._id, question['extra'][0]['viewUrl'])
def test_archive_success_same_file_in_component(self):
    # A selected file lives on a child component rather than the top-level
    # node; after archiving, the question's viewUrl must point at the
    # child's registration.
    file_tree = file_tree_factory(3, 3, 3)
    selected = select_files_from_tree(file_tree).values()[0]
    child_file_tree = file_tree_factory(0, 0, 0)
    child_file_tree['children'] = [selected]
    node = factories.NodeFactory(creator=self.user)
    child = factories.NodeFactory(creator=self.user, parent=node)
    # Metadata references the file on the child node.
    data = {
        ('q_' + selected['name']): {
            'value': fake.word(),
            'extra': [{
                'sha256': selected['extra']['hashes']['sha256'],
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    child._id, selected['path']
                ),
                'selectedFileName': selected['name'],
                'nodeId': child._id
            }]
        }
    }
    schema = generate_schema_from_data(data)
    with test_utils.mock_archive(node, schema=schema, data=data, autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(BaseStorageAddon, '_get_file_tree', mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory(initiator=registration.creator)
            archive_success(registration._id, job._id)
            # Refresh from the database to see metadata written by the task.
            registration.reload()
            child_reg = registration.nodes[0]
            for key, question in registration.registered_meta[schema._id].items():
                assert_in(child_reg._id, question['extra'][0]['viewUrl'])
def test_archive_success_escaped_file_names(self):
    # The file's raw name contains HTML-special characters; the metadata
    # references the stripped/escaped form, and archiving must still match
    # and preserve that escaped name.
    file_tree = file_tree_factory(0, 0, 0)
    fake_file = file_factory(name='>and&and<')
    fake_file_name = strip_html(fake_file['name'])
    file_tree['children'] = [fake_file]
    node = factories.NodeFactory(creator=self.user)
    data = {
        ('q_' + fake_file_name): {
            'value': fake.word(),
            'extra': [{
                'sha256': fake_file['extra']['hashes']['sha256'],
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    node._id, fake_file['path']
                ),
                'selectedFileName': fake_file_name,
                'nodeId': node._id
            }]
        }
    }
    schema = generate_schema_from_data(data)
    draft = factories.DraftRegistrationFactory(branched_from=node, registration_schema=schema, registered_metadata=data)
    with test_utils.mock_archive(node, schema=schema, data=data, autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(BaseStorageAddon, '_get_file_tree', mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory(initiator=registration.creator)
            archive_success(registration._id, job._id)
            # Refresh from the database to see metadata written by the task.
            registration.reload()
            for key, question in registration.registered_meta[schema._id].items():
                assert_equal(question['extra'][0]['selectedFileName'], fake_file_name)
def test_archive_success_different_name_same_sha(self):
    # Two files share a sha256 but have different names; after archiving,
    # the question must still reference the file that was actually
    # selected (matched by name, not just by hash).
    file_tree = file_tree_factory(0, 0, 0)
    fake_file = file_factory()
    # Same sha256 as fake_file but a different (random) name.
    fake_file2 = file_factory(sha256=fake_file['extra']['hashes']['sha256'])
    file_tree['children'] = [fake_file, fake_file2]
    node = factories.NodeFactory(creator=self.user)
    data = {
        ('q_' + fake_file['name']): {
            'value': fake.word(),
            'extra': [{
                'sha256': fake_file['extra']['hashes']['sha256'],
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    node._id, fake_file['path']
                ),
                'selectedFileName': fake_file['name'],
                'nodeId': node._id
            }]
        }
    }
    schema = generate_schema_from_data(data)
    with test_utils.mock_archive(node, schema=schema, data=data, autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(StorageAddonBase, '_get_file_tree', mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory()
            archive_success(registration._id, job._id)
            for key, question in registration.registered_meta[schema._id].items():
                # The selected file's name must be preserved.
                assert_equal(question['extra'][0]['selectedFileName'], fake_file['name'])
def create_fake_project(creator, n_users, privacy, n_components, name, n_tags, presentation_name, is_registration):
    """Create and save a fake project (or registration) populated with
    contributors, components, and tags.

    n_components may be an int (flat components) or a nested list
    describing a component hierarchy.
    """
    auth = Auth(user=creator)
    title = name if name else fake.sentence()
    # Dispatch to the appropriate factory for the node type.
    factory = RegistrationFactory if is_registration else ProjectFactory
    project = factory(title=title, description=fake.paragraph(), creator=creator)
    project.set_privacy(privacy)

    for _ in range(n_users):
        project.add_contributor(create_fake_user(), auth=auth)

    if isinstance(n_components, int):
        for _ in range(n_components):
            NodeFactory(project=project, title=fake.sentence(),
                        description=fake.paragraph(), creator=creator)
    elif isinstance(n_components, list):
        render_generations_from_node_structure_list(project, creator, n_components)

    for _ in range(n_tags):
        project.add_tag(fake.word(), auth=auth)
    if presentation_name is not None:
        # Presentation projects get the presentation tag plus 'poster'.
        project.add_tag(presentation_name, auth=auth)
        project.add_tag('poster', auth=auth)

    project.save()
    return project
def test_archive_success_different_name_same_sha(self):
    # Prereg-schema variant: 'extra' is a single dict rather than a list.
    # Two files share a sha256 but have different names; the archived
    # question must keep the selected file's name.
    ensure_schemas()
    file_tree = file_tree_factory(0, 0, 0)
    fake_file = file_factory()
    # Same sha256 as fake_file but a different (random) name.
    fake_file2 = file_factory(sha256=fake_file['extra']['hashes']['sha256'])
    file_tree['children'] = [fake_file, fake_file2]
    node = factories.NodeFactory(creator=self.user)
    prereg_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    data = {
        ('q_' + fake_file['name']): {
            'value': fake.word(),
            'extra': {
                'sha256': fake_file['extra']['hashes']['sha256'],
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    node._id, fake_file['path']
                ),
                'selectedFileName': fake_file['name'],
                'nodeId': node._id
            }
        }
    }
    with test_utils.mock_archive(node, schema=prereg_schema, data=data, autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(StorageAddonBase, '_get_file_tree', mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory()
            archive_success(registration._id, job._id)
            for key, question in registration.registered_meta[prereg_schema._id].items():
                # The selected file's name must be preserved.
                assert_equal(question['extra']['selectedFileName'], fake_file['name'])
def test_archive_success_with_deeply_nested_schema(self):
    # After archiving, every file question (flat or nested inside an
    # object-valued answer) must have its viewUrl rewritten to point at
    # the registration instead of the source node, and every selected
    # file must be accounted for.
    node = factories.NodeFactory(creator=self.user)
    file_trees, selected_files, node_index = generate_file_tree([node])
    data = {
        ('q_' + selected_file['name']): {
            'value': fake.word(),
            'extra': [{
                'selectedFileName': selected_file['name'],
                'nodeId': node._id,
                'sha256': sha256,
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(node._id, selected_file['path'])
            }]
        }
        for sha256, selected_file in selected_files.items()
    }
    schema = generate_schema_from_data(data)
    with test_utils.mock_archive(node, schema=schema, data=data, autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(BaseStorageAddon, '_get_file_tree', mock.Mock(return_value=file_trees[node._id])):
            job = factories.ArchiveJobFactory(initiator=registration.creator)
            archive_success(registration._id, job._id)
            # Refresh from the database to see metadata written by the task.
            registration.reload()
            for key, question in registration.registered_meta[schema._id].items():
                target = None
                if isinstance(question['value'], dict):
                    # Object-valued answer: find the sub-answer that carries
                    # file metadata.
                    target = [v for v in question['value'].values() if 'extra' in v and 'sha256' in v['extra'][0]][0]
                elif 'extra' in question and 'sha256' in question['extra'][0]:
                    target = question
                if target:
                    # viewUrl must now reference the registration, not the node.
                    assert_in(registration._id, target['extra'][0]['viewUrl'])
                    assert_not_in(node._id, target['extra'][0]['viewUrl'])
                    del selected_files[target['extra'][0]['sha256']]
                else:
                    # check non-file questions are unmodified
                    assert_equal(data[key]['value'], question['value'])
            # Every selected file should have been seen exactly once.
            assert_false(selected_files)
def wiki_registration_private(self, project_public, user_creator):
    """Fixture: a wiki page (with one version) on a private registration
    of the public project."""
    registration = RegistrationFactory(project=project_public, is_public=False)
    page = WikiFactory(
        node=registration,
        user=user_creator,
        page_name=fake.word(),
    )
    WikiVersionFactory(wiki_page=page, user=user_creator)
    return page
def test_create_private_wiki_page(self, app, user_write_contributor, url_node_private):
    """A write contributor can create a wiki page on a private node."""
    name = fake.word()
    response = app.post_json_api(
        url_node_private, create_wiki_payload(name),
        auth=user_write_contributor.auth)
    assert response.status_code == 201
    assert response.json['data']['attributes']['name'] == name
def test_do_not_create_wiki_page_if_publicly_editable_non_contrib(
        self, app, user_creator, user_non_contributor,
        project_public, url_node_public, wiki_public):
    """Even with publicly-editable wiki settings enabled, a non-contributor
    may not create a page through the API."""
    project_public.addons_wiki_node_settings.set_editing(True, auth=Auth(user_creator))
    response = app.post_json_api(
        url_node_public, create_wiki_payload(fake.word()),
        auth=user_non_contributor.auth, expect_errors=True)
    assert response.status_code == 403
def test_do_not_create_wiki_page_if_disabled(
        self, app, user_creator, project_public,
        url_node_public, wiki_public):
    """With the wiki addon removed from the node, page creation 404s."""
    project_public.delete_addon('wiki', auth=Auth(user_creator))
    response = app.post_json_api(
        url_node_public, create_wiki_payload(fake.word()),
        auth=user_creator.auth, expect_errors=True)
    assert response.status_code == 404
def test_create_public_wiki_page_with_content(self, app, user_write_contributor, url_node_public, project_public):
    """A write contributor can create a wiki page with initial content,
    which becomes the page's first version."""
    name = fake.word()
    payload = create_wiki_payload(name)
    payload['data']['attributes']['content'] = 'my first wiki page'
    response = app.post_json_api(url_node_public, payload, auth=user_write_contributor.auth)
    assert response.status_code == 201
    assert response.json['data']['attributes']['name'] == name
    # The supplied content must be stored as the current wiki version.
    created = WikiPage.objects.get_for_node(project_public, name)
    assert created.get_version().content == 'my first wiki page'
def folder_factory(depth, num_files, num_folders, path_above):
    """Return a fake folder dict rooted under path_above.

    While depth > 0, each level contains num_files fake files plus one
    nested subfolder; at depth 0 the folder has no children.
    """
    new_path = os.path.join(path_above.rstrip('/'), fake.word())
    if depth > 0:
        children = [file_factory() for _ in range(num_files)]
        children.append(folder_factory(depth - 1, num_files, num_folders, new_path))
    else:
        children = []
    # NOTE(review): num_folders is unused — only one subfolder is created
    # per level; confirm whether that is intentional.
    return {
        'path': new_path,
        'kind': 'folder',
        'children': children,
    }
def make_rename_payload(wiki_page):
    """Build a JSON-API payload renaming wiki_page to a fresh fake name.

    Returns the (payload, new_page_name) pair so callers can assert on
    the generated name.
    """
    new_page_name = fake.word()
    attributes = {'name': new_page_name}
    payload = {
        'data': {
            'id': wiki_page._id,
            'type': 'wikis',
            'attributes': attributes,
        },
    }
    return payload, new_page_name
def folder_factory(depth, num_files, num_folders, path_above):
    """Return a fake folder dict rooted under path_above.

    While depth > 0, each level holds num_files fake files plus exactly
    one nested subfolder; at depth 0 the folder is empty.
    """
    new_path = os.path.join(path_above.rstrip('/'), fake.word())
    # Parentheses make the conditional's scope explicit: the whole
    # concatenation only happens while depth > 0.
    children = (
        [file_factory() for _ in range(num_files)]
        + [folder_factory(depth - 1, num_files, num_folders, new_path)]
    ) if depth > 0 else []
    return {
        'path': new_path,
        'kind': 'folder',
        'children': children,
    }
def test_create_public_wiki_page_with_content(self, app, user_write_contributor, url_node_public, project_public):
    """A write contributor — and a write-level OSF group member — can
    create a wiki page with initial content."""
    page_name = fake.word()
    payload = create_wiki_payload(page_name)
    payload['data']['attributes']['content'] = 'my first wiki page'
    res = app.post_json_api(url_node_public, payload, auth=user_write_contributor.auth)
    assert res.status_code == 201
    assert res.json['data']['attributes']['name'] == page_name
    # The supplied content must be stored as the current wiki version.
    wiki_page = WikiPage.objects.get_for_node(project_public, page_name)
    assert wiki_page.get_version().content == 'my first wiki page'

    # test_osf_group_member_write
    group_mem = AuthUserFactory()
    group = OSFGroupFactory(creator=group_mem)
    project_public.add_osf_group(group, WRITE)
    # Fix: this call expects success (201); the original passed
    # expect_errors=True, which suppresses webtest's failure on an error
    # status and would let a 4xx slip through to a less informative assert.
    res = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), auth=group_mem.auth)
    assert res.status_code == 201
def test_archive_success_same_file_in_component(self):
    # Prereg-schema variant ('extra' is a single dict): a selected file
    # lives on a child component, and after archiving the question's
    # viewUrl must point at the child's registration.
    ensure_schemas()
    file_tree = file_tree_factory(3, 3, 3)
    selected = select_files_from_tree(file_tree).values()[0]
    child_file_tree = file_tree_factory(0, 0, 0)
    child_file_tree['children'] = [selected]
    node = factories.NodeFactory(creator=self.user)
    child = factories.NodeFactory(creator=self.user, parent=node)
    prereg_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2))
    # Metadata references the file on the child node.
    data = {
        ('q_' + selected['name']): {
            'value': fake.word(),
            'extra': {
                'sha256': selected['extra']['hashes']['sha256'],
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    child._id, selected['path']),
                'selectedFileName': selected['name'],
                'nodeId': child._id
            }
        }
    }
    with test_utils.mock_archive(node, schema=prereg_schema, data=data, autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(StorageAddonBase, '_get_file_tree', mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory()
            archive_success(registration._id, job._id)
            # First (only) child registration of the top-level registration.
            child_reg = registration.nodes[0]
            for key, question in registration.registered_meta[
                    prereg_schema._id].items():
                assert_in(child_reg._id, question['extra']['viewUrl'])
def create_fake_project(creator, n_users, privacy, n_components, name, n_tags, presentation_name, is_registration):
    """Create and save a fake project or registration with fake
    contributors, components, and tags.

    n_components may be an int (flat components) or a nested list
    describing a component hierarchy.
    """
    auth = Auth(user=creator)
    title = name if name else fake.sentence()
    if is_registration:
        project = RegistrationFactory(title=title, description=fake.paragraph(), creator=creator)
    else:
        project = ProjectFactory(title=title, description=fake.paragraph(), creator=creator)
    project.set_privacy(privacy)

    for _ in range(n_users):
        project.add_contributor(create_fake_user(), auth=auth)

    if isinstance(n_components, int):
        for _ in range(n_components):
            NodeFactory(project=project, title=fake.sentence(),
                        description=fake.paragraph(), creator=creator)
    elif isinstance(n_components, list):
        render_generations_from_node_structure_list(project, creator, n_components)

    for _ in range(n_tags):
        project.add_tag(fake.word(), auth=auth)
    if presentation_name is not None:
        # Presentation projects get the presentation tag plus 'poster'.
        project.add_tag(presentation_name, auth=auth)
        project.add_tag('poster', auth=auth)

    project.save()
    return project
def wiki_public(self, project_public, user_creator):
    """Fixture: a wiki page (with one version) on the public project."""
    page = WikiFactory(
        node=project_public,
        user=user_creator,
        page_name=fake.word(),
    )
    WikiVersionFactory(wiki_page=page, user=user_creator)
    return page
def wiki_registration_private(self, project_public, user_creator):
    """Fixture: a wiki page (with one version) on a private registration
    of the public project."""
    registration = RegistrationFactory(project=project_public, is_public=False)
    wiki_page = WikiFactory(node=registration, user=user_creator, page_name=fake.word())
    # Created for its side effect; the previous `wiki_version` binding
    # was unused.
    WikiVersionFactory(wiki_page=wiki_page, user=user_creator)
    return wiki_page
def test_archive_success(self):
    # End-to-end archiver check: every file question (flat or nested in
    # an object-valued answer) must have its viewUrl rewritten to the
    # registration, non-file questions must be untouched, and every
    # selected file must be accounted for.
    ensure_schemas()
    file_tree = file_tree_factory(3, 3, 3)
    selected_files = select_files_from_tree(file_tree)
    node = factories.NodeFactory(creator=self.user)
    prereg_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    # Flat file questions, one per selected file.
    data = {
        ('q_' + selected_file['name']): {
            'value': fake.word(),
            'extra': {
                'selectedFileName': selected_file['name'],
                'nodeId': node._id,
                'sha256': sha256,
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(node._id, selected_file['path'])
            }
        }
        for sha256, selected_file in selected_files.items()
    }
    # Object-valued questions: one file sub-answer plus one plain one.
    object_types = {
        ('q_' + selected_file['name'] + '_obj'): {
            'value': {
                name_factory(): {
                    'value': fake.word(),
                    'extra': {
                        'selectedFileName': selected_file['name'],
                        'nodeId': node._id,
                        'sha256': sha256,
                        'viewUrl': '/project/{0}/files/osfstorage{1}'.format(node._id, selected_file['path'])
                    }
                },
                name_factory(): {
                    'value': fake.word()
                }
            }
        }
        for sha256, selected_file in selected_files.items()
    }
    data.update(copy.deepcopy(object_types))
    # Questions that reference no files at all.
    other_questions = {
        'q{}'.format(i): {
            'value': fake.word()
        }
        for i in range(5)
    }
    data.update(other_questions)
    with test_utils.mock_archive(node, schema=prereg_schema, data=data, autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(StorageAddonBase, '_get_file_tree', mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory()
            archive_success(registration._id, job._id)
            for key, question in registration.registered_meta[prereg_schema._id].items():
                target = None
                if isinstance(question['value'], dict):
                    # Object-valued answer: pick the sub-answer carrying
                    # file metadata.
                    target = [v for v in question['value'].values() if 'extra' in v and 'sha256' in v['extra']][0]
                elif 'extra' in question and 'hashes' in question['extra']:
                    # NOTE(review): this branch checks 'hashes' while the
                    # dict branch checks 'sha256' — confirm which key the
                    # flat questions actually carry here.
                    target = question
                if target:
                    # viewUrl must now reference the registration, not the node.
                    assert_in(registration._id, target['extra']['viewUrl'])
                    assert_not_in(node._id, target['extra']['viewUrl'])
                    del selected_files[target['extra']['sha256']]
                else:
                    # check non-file questions are unmodified
                    assert_equal(data[key]['value'], question['value'])
            # Every selected file should have been seen exactly once.
            assert_false(selected_files)
def wiki_private(self, project_private, user_creator):
    """Fixture: a wiki page (with one version) on the private project."""
    wiki_page = WikiFactory(node=project_private, user=user_creator, page_name=fake.word())
    # Created for its side effect; the previous `wiki_version` binding
    # was unused.
    WikiVersionFactory(wiki_page=wiki_page, user=user_creator)
    return wiki_page
def test_archive_success_with_components(self):
    # Files are selected from the root node and each of its descendants;
    # after archiving, every file question's viewUrl must point at one of
    # the registered nodes, non-file questions must be untouched, and
    # every selected file must be accounted for.
    ensure_schemas()
    node = factories.NodeFactory(creator=self.user)
    comp1 = factories.NodeFactory(parent=node, creator=self.user)
    factories.NodeFactory(parent=comp1, creator=self.user)
    factories.NodeFactory(parent=node, creator=self.user)
    nodes = [n for n in node.node_and_primary_descendants()]
    # One independent fake file tree per node.
    file_trees = {
        n._id: file_tree_factory(3, 3, 3)
        for n in nodes
    }
    selected_files = {}
    # Maps each selected file's sha256 to the node that owns it.
    selected_file_node_index = {}
    for n in nodes:
        file_tree = file_trees[n._id]
        selected = select_files_from_tree(file_tree)
        selected_file_node_index.update({
            sha256: n._id
            for sha256 in selected.keys()
        })
        selected_files.update(selected)  # select files from each Node
    prereg_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    # Flat file questions, one per selected file.
    data = {
        ('q_' + selected_file['name']): {
            'value': fake.word(),
            'extra': {
                'sha256': sha256,
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    selected_file_node_index[sha256],
                    selected_file['path']
                ),
                'selectedFileName': selected_file['name'],
                'nodeId': selected_file_node_index[sha256]
            }
        }
        for sha256, selected_file in selected_files.items()
    }
    # Object-valued questions: one file sub-answer plus one plain one.
    object_types = {
        ('q_' + selected_file['name'] + '_obj'): {
            'value': {
                name_factory(): {
                    'value': fake.word(),
                    'extra': {
                        'sha256': sha256,
                        'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                            selected_file_node_index[sha256],
                            selected_file['path']
                        ),
                        'selectedFileName': selected_file['name'],
                        'nodeId': selected_file_node_index[sha256]
                    }
                },
                name_factory(): {
                    'value': fake.word()
                }
            }
        }
        for sha256, selected_file in selected_files.items()
    }
    data.update(object_types)
    # Questions that reference no files at all.
    other_questions = {
        'q{}'.format(i): {
            'value': fake.word()
        }
        for i in range(5)
    }
    data.update(other_questions)
    with test_utils.mock_archive(node, schema=prereg_schema, data=copy.deepcopy(data), autocomplete=True, autoapprove=True) as registration:
        # Patch _get_file_tree on each registered node's osfstorage addon;
        # patches are started manually and stopped at the end of the test.
        patches = []
        for n in registration.node_and_primary_descendants():
            file_tree = file_trees[n.registered_from._id]
            osfstorage = n.get_addon('osfstorage')
            patch = mock.patch.object(
                osfstorage,
                '_get_file_tree',
                mock.Mock(return_value=file_tree)
            )
            patch.start()
            patches.append(patch)
        job = factories.ArchiveJobFactory()
        archive_success(registration._id, job._id)
        for key, question in registration.registered_meta[prereg_schema._id].items():
            target = None
            if isinstance(question['value'], dict):
                # Object-valued answer: pick the sub-answer carrying
                # file metadata.
                target = [v for v in question['value'].values() if 'extra' in v and 'sha256' in v['extra']][0]
            elif 'extra' in question and 'sha256' in question['extra']:
                target = question
            if target:
                # Extract the 5-character node id from the rewritten
                # viewUrl and check it belongs to a registered node.
                node_id = re.search(
                    r'^/project/(?P<node_id>\w{5}).+$',
                    target['extra']['viewUrl']
                ).groupdict()['node_id']
                assert_in(
                    node_id,
                    [r._id for r in registration.node_and_primary_descendants()]
                )
                if target['extra']['sha256'] in selected_files:
                    del selected_files[target['extra']['sha256']]
            else:
                # check non-file questions are unmodified
                assert_equal(data[key]['value'], question['value'])
        # ensure each selected file was checked
        assert_false(selected_files)
        for patch in patches:
            patch.stop()