def get_metaschemas(*args, **kwargs):
    """
    List metaschemas with which a draft registration may be created. Only fetch
    the newest version for each schema.

    :return: serialized metaschemas
    :rtype: dict
    """
    # BUG FIX: query-string values arrive as strings; without int() the
    # slice `meta_schemas[:count]` raises TypeError whenever ?count= is given.
    count = int(request.args.get('count', 100))
    include = request.args.get('include', 'latest')

    meta_schema_collection = database['metaschema']

    meta_schemas = []
    if include == 'latest':
        # One query per distinct schema name, pinned to version 2.
        schema_names = meta_schema_collection.distinct('name')
        for name in schema_names:
            meta_schema_set = MetaSchema.find(
                Q('name', 'eq', name) &
                Q('schema_version', 'eq', 2)
            )
            meta_schemas = meta_schemas + [s for s in meta_schema_set]
    else:
        meta_schemas = MetaSchema.find()
    # Only expose active schemas, ordered as declared in ACTIVE_META_SCHEMAS.
    meta_schemas = [
        schema
        for schema in meta_schemas
        if schema.name in ACTIVE_META_SCHEMAS
    ]
    meta_schemas.sort(key=lambda a: ACTIVE_META_SCHEMAS.index(a.name))
    return {
        'meta_schemas': [
            serialize_meta_schema(ms) for ms in meta_schemas[:count]
        ]
    }, http.OK
def test_ensure_schemas(self):
    """ensure_schemas() populates an empty collection with every OSF schema."""
    # Sanity check: there should be zero MetaSchema records to begin with.
    assert_equal(MetaSchema.find().count(), 0)
    ensure_schemas()
    expected_total = len(OSF_META_SCHEMAS)
    assert_equal(MetaSchema.find().count(), expected_total)
def test_metaschema_uniqueness_is_enforced_in_the_database(self):
    """Saving two MetaSchemas with the same (name, version) pair must fail."""
    original = MetaSchema(name='foo', schema={'foo': 42}, schema_version=1)
    original.save()
    duplicate = MetaSchema(
        name='foo',
        schema={'bar': 24},
        schema_version=1,
    )
    assert_raises(ValidationError, duplicate.save)
def get_default_metaschema():
    """Return the first available MetaSchema, seeding schemas on first use.

    This needs to be a method so it gets called after the test database is
    set up.
    """
    try:
        return MetaSchema.find()[0]
    except IndexError:
        # Collection is empty: populate it, then fetch again.
        ensure_schemas()
    return MetaSchema.find()[0]
def setUp(self):
    """Build one registration per legacy (version 1) schema plus one
    multi-schema registration, then null out every node's
    ``registered_schema`` so the migration under test must repopulate it."""
    super(TestMigrateSchemas, self).setUp()
    self.regular_old_node = factories.NodeFactory()
    # One registration per version-1 schema name.
    self.open_ended_schema = MetaSchema.find_one(
        Q('name', 'eq', SCHEMA_NAMES[0]) &
        Q('schema_version', 'eq', 1))
    self.open_ended = self._make_registration(self.open_ended_schema)
    self.standard_schema = MetaSchema.find_one(
        Q('name', 'eq', SCHEMA_NAMES[1]) &
        Q('schema_version', 'eq', 1))
    self.standard = self._make_registration(self.standard_schema)
    self.brandt_pre_schema = MetaSchema.find_one(
        Q('name', 'eq', SCHEMA_NAMES[2]) &
        Q('schema_version', 'eq', 1))
    self.brandt_pre = self._make_registration(self.brandt_pre_schema)
    self.brandt_post_schema = MetaSchema.find_one(
        Q('name', 'eq', SCHEMA_NAMES[3]) &
        Q('schema_version', 'eq', 1))
    self.brandt_post = self._make_registration(self.brandt_post_schema)
    # A single registration that used two schemas at once.
    self.multiple = self._make_registration(
        [self.brandt_pre_schema, self.brandt_post_schema])
    # Looked up by name only — no schema_version filter here.
    self.confirmatory_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Confirmatory - General'))
    self.confirmatory = self._make_registration(self.confirmatory_schema)
    # Simulate pre-migration state: wipe the denormalized schema pointers.
    self.db['node'].update({}, {'$set': {
        'registered_schema': None
    }}, multi=True)
def get_metaschemas(*args, **kwargs):
    """
    List metaschemas with which a draft registration may be created. Only fetch
    the newest version for each schema.

    :return: serialized metaschemas
    :rtype: dict
    """
    # BUG FIX: request.args values are strings; cast so the slice below
    # (`meta_schemas[:count]`) does not raise TypeError when ?count= is given.
    count = int(request.args.get('count', 100))
    include = request.args.get('include', 'latest')

    meta_schema_collection = database['metaschema']

    meta_schemas = []
    if include == 'latest':
        schema_names = meta_schema_collection.distinct('name')
        for name in schema_names:
            meta_schema_set = MetaSchema.find(
                Q('name', 'eq', name) &
                Q('schema_version', 'eq', 2))
            meta_schemas = meta_schemas + [s for s in meta_schema_set]
    else:
        meta_schemas = MetaSchema.find()
    # Restrict to active schemas, preserving their declared ordering.
    meta_schemas = [
        schema for schema in meta_schemas
        if schema.name in ACTIVE_META_SCHEMAS
    ]
    meta_schemas.sort(key=lambda a: ACTIVE_META_SCHEMAS.index(a.name))
    return {
        'meta_schemas': [serialize_meta_schema(ms) for ms in meta_schemas[:count]]
    }, http.OK
def generate_schema_from_data(data):
    """Build and save a throwaway 'Test' MetaSchema whose pages/questions
    mirror the shape of *data* (a dict of qid -> question dict)."""

    def from_property(id, prop):
        # Dict-valued properties become nested objects; 'extra' marks uploads.
        if isinstance(prop.get('value'), dict):
            return {
                'id': id,
                'type': 'object',
                'properties': [
                    from_property(pid, sp)
                    for pid, sp in prop['value'].items()
                ]
            }
        else:
            return {
                'id': id,
                'type': 'osf-upload' if prop.get('extra') else 'string'
            }

    def from_question(qid, question):
        # BUG FIX: previously read the enclosing list-comprehension variable
        # `q` (Python 2 scope leakage) instead of the `question` parameter.
        if question.get('extra'):
            return {
                'qid': qid,
                'type': 'osf-upload'
            }
        elif isinstance(question.get('value'), dict):
            return {
                'qid': qid,
                'type': 'object',
                'properties': [
                    from_property(id, value)
                    for id, value in question.get('value').items()
                ]
            }
        else:
            return {
                'qid': qid,
                'type': 'string'
            }

    schema = MetaSchema(
        name='Test',
        schema={
            'name': "Test",
            'version': 2,
            'config': {
                'hasFiles': True
            },
            'pages': [{
                'id': 'page1',
                'questions': [
                    from_question(qid, q)
                    for qid, q in data.items()
                ]
            }]
        },
    )
    schema.save()
    return schema
def test_ensure_schemas(self):
    """Seeding an empty database creates one record per OSF meta-schema."""
    before = MetaSchema.find().count()
    # Should be zero MetaSchema records to begin with.
    assert_equal(before, 0)
    ensure_schemas()
    after = MetaSchema.find().count()
    assert_equal(after, len(OSF_META_SCHEMAS))
def test_archive_success_different_name_same_sha(self):
    """Two files sharing a sha256: archive_success must resolve the schema
    answer to the file actually selected (disambiguated by name), not an
    arbitrary file with the same hash."""
    ensure_schemas()
    file_tree = file_tree_factory(0, 0, 0)
    fake_file = file_factory()
    # Second file: same content hash, different name.
    fake_file2 = file_factory(sha256=fake_file['extra']['hashes']['sha256'])
    file_tree['children'] = [fake_file, fake_file2]
    node = factories.NodeFactory(creator=self.user)
    prereg_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    # A single file question answered with the FIRST file.
    data = {
        ('q_' + fake_file['name']): {
            'value': fake.word(),
            'extra': {
                'sha256': fake_file['extra']['hashes']['sha256'],
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    node._id, fake_file['path']
                ),
                'selectedFileName': fake_file['name'],
                'nodeId': node._id
            }
        }
    }
    with test_utils.mock_archive(node, schema=prereg_schema, data=data, autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(StorageAddonBase, '_get_file_tree', mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory()
            archive_success(registration._id, job._id)
            # The migrated answer must still reference the selected file's name.
            for key, question in registration.registered_meta[prereg_schema._id].items():
                assert_equal(question['extra']['selectedFileName'], fake_file['name'])
def test_inactive_metaschema_not_returned(self):
    """Requesting the detail view of an inactive schema should 404."""
    self.schema = MetaSchema.find_one(
        Q('name', 'eq', 'Open-Ended Registration') &
        Q('schema_version', 'eq', 1)
    )
    self.url = '/{}metaschemas/{}/'.format(API_BASE, self.schema._id)
    response = self.app.get(self.url, auth=self.user.auth, expect_errors=True)
    assert_equal(response.status_code, 404)
def check_access(node, auth, action, cas_resp):
    """Verify that user can perform requested action on resource.

    Raise appropriate error code if action cannot proceed.

    :param node: target node
    :param auth: current user's auth context
    :param str action: waterbutler action name (looked up in permission_map)
    :param cas_resp: CAS response when the request carries an OAuth token
    :raises HTTPError: BAD_REQUEST for unknown actions; FORBIDDEN or
        UNAUTHORIZED when access is denied
    """
    permission = permission_map.get(action, None)
    if permission is None:
        # Unknown action name: client error, not an auth failure.
        raise HTTPError(httplib.BAD_REQUEST)

    if cas_resp:
        if permission == 'read':
            if node.is_public:
                return True
            required_scope = oauth_scopes.CoreScopes.NODE_FILE_READ
        else:
            required_scope = oauth_scopes.CoreScopes.NODE_FILE_WRITE
        # Token must be authenticated AND carry the required file scope.
        if not cas_resp.authenticated \
           or required_scope not in oauth_scopes.normalize_scopes(cas_resp.attributes['accessTokenScope']):
            raise HTTPError(httplib.FORBIDDEN)

    if permission == 'read' and node.can_view(auth):
        return True
    if permission == 'write' and node.can_edit(auth):
        return True

    # Users attempting to register projects with components might not have
    # `write` permissions for all components. This will result in a 403 for
    # all `copyto` actions as well as `copyfrom` actions if the component
    # in question is not public. To get around this, we have to recursively
    # check the node's parent node to determine if they have `write`
    # permissions up the stack.
    # TODO(hrybacki): is there a way to tell if this is for a registration?
    # All nodes being registered that receive the `copyto` action will have
    # `node.is_registration` == True. However, we have no way of telling if
    # `copyfrom` actions are originating from a node being registered.
    # TODO This is raise UNAUTHORIZED for registrations that have not been archived yet
    if action == 'copyfrom' or (action == 'copyto' and node.is_registration):
        parent = node.parent_node
        while parent:
            if parent.can_edit(auth):
                return True
            parent = parent.parent_node

    # Users with the PREREG_ADMIN_TAG should be allowed to download files
    # from prereg challenge draft registrations.
    try:
        prereg_schema = MetaSchema.find_one(
            Q('name', 'eq', 'Prereg Challenge') &
            Q('schema_version', 'eq', 2))
        allowed_nodes = [node] + node.parents
        prereg_draft_registration = DraftRegistration.find(
            Q('branched_from', 'in', [n._id for n in allowed_nodes]) &
            Q('registration_schema', 'eq', prereg_schema))
        if action == 'download' and \
           auth.user is not None and \
           prereg_draft_registration.count() > 0 and \
           settings.PREREG_ADMIN_TAG in auth.user.system_tags:
            return True
    except NoResultsFound:
        pass

    # Deny: 403 for an authenticated user, 401 to prompt login otherwise.
    raise HTTPError(httplib.FORBIDDEN if auth.user else httplib.UNAUTHORIZED)
def draft_reg_util():
    """Reset draft registrations and return the Prereg Challenge v2 schema."""
    DraftRegistration.remove()
    ensure_schemas()
    prereg_query = (
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    return MetaSchema.find_one(prereg_query)
def setUp(self):
    """Create a user, seed the schemas, and build the detail URL for the
    latest Prereg Challenge schema."""
    super(TestMetaSchemaDetail, self).setUp()
    self.user = AuthUserFactory()
    ensure_schemas()
    prereg_query = (
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', LATEST_SCHEMA_VERSION)
    )
    self.schema = MetaSchema.find_one(prereg_query)
    self.url = '/{}metaschemas/{}/'.format(API_BASE, self.schema._id)
def get_queryset(self):
    """Return Prereg Challenge drafts that have entered the approval
    pipeline, sorted by this view's ordering."""
    schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    drafts = DraftRegistration.find(
        Q('registration_schema', 'eq', schema) &
        Q('approval', 'ne', None)
    )
    return drafts.sort(self.ordering)
def test_has_project_and_draft_registration(self):
    """A user with a prereg draft has both flags set in the landing context."""
    schema = MetaSchema.find_one(Q('name', 'eq', 'Prereg Challenge'))
    factories.DraftRegistrationFactory(
        initiator=self.user,
        registration_schema=schema,
    )
    expected = {
        'has_projects': True,
        'has_draft_registrations': True,
    }
    assert_equal(landing_page(user=self.user), expected)
def test_migrate_registration_schemas(self):
    """After migration, each registration's ``registered_meta`` should carry
    the legacy per-schema answers keyed by schema id.

    NOTE(review): ``target_nodes`` is a pymongo cursor created BEFORE
    ``do_migration`` runs but only evaluated afterwards, so it yields
    post-migration documents — confirm this ordering is intentional.
    """
    target_nodes = self.db['node'].find({'is_registration': True})
    do_migration(_db=self.db)
    for node in target_nodes:
        for meta_schema_id in node['registered_schema']:
            meta_schema = MetaSchema.load(meta_schema_id)
            old_data = OLD_META[meta_schema.name]
            # Every legacy answer must survive under the new layout.
            for key, value in old_data.iteritems():
                assert_equal(
                    node['registered_meta'][meta_schema._id][key]['value'],
                    value)
def test_migrate_registration_schemas(self):
    """Verify migrated registrations keep every legacy answer, re-keyed by
    schema id under ``registered_meta``.

    NOTE(review): the ``target_nodes`` cursor is created before the
    migration runs and iterated after it; pymongo cursors are lazy, so the
    loop sees post-migration documents — confirm that is the intent.
    """
    target_nodes = self.db['node'].find({'is_registration': True})
    do_migration(_db=self.db)
    for node in target_nodes:
        for meta_schema_id in node['registered_schema']:
            meta_schema = MetaSchema.load(meta_schema_id)
            old_data = OLD_META[meta_schema.name]
            for key, value in old_data.iteritems():
                assert_equal(
                    node['registered_meta'][meta_schema._id][key]['value'],
                    value
                )
def get_prereg_drafts(user=None, filters=tuple()):
    """Return Prereg Challenge drafts awaiting approval, ordered by the
    date approval was initiated."""
    schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    query = (
        Q('registration_schema', 'eq', schema) &
        Q('approval', 'ne', None)
    )
    if user:
        # TODO: filter by assignee; this requires multiple levels of Prereg admins-
        # one level that can see all drafts, and another than can see only the ones they're assigned.
        # As a followup to this, we need to make sure this applies to approval/rejection/commenting endpoints
        # query = query & Q('_metaschema_flags.assignee', 'eq', user._id)
        pass
    return sorted(
        DraftRegistration.find(query),
        key=operator.attrgetter('approval.initiation_date'),
    )
def setUpPrereg(self):
    """Seed the schemas and create a prereg admin user plus a Prereg
    Challenge draft registration initiated by ``self.user``."""
    ensure_schemas()
    self.prereg_challenge_admin_user = AuthUserFactory()
    # The admin tag grants the prereg-reviewer privileges under test.
    self.prereg_challenge_admin_user.system_tags.append(settings.PREREG_ADMIN_TAG)
    self.prereg_challenge_admin_user.save()
    prereg_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    # Removed leftover commented-out ipdb debugger invocation.
    self.draft_registration = factories.DraftRegistrationFactory(
        initiator=self.user,
        registration_schema=prereg_schema
    )
def test_has_project_and_draft_registration(self):
    """Landing-page context reports both projects and drafts for a user who
    initiated a prereg draft."""
    prereg_schema = MetaSchema.find_one(Q('name', 'eq', 'Prereg Challenge'))
    factories.DraftRegistrationFactory(
        initiator=self.user,
        registration_schema=prereg_schema,
    )
    context = landing_page(user=self.user)
    assert_equal(context, {
        'has_projects': True,
        'has_draft_registrations': True,
    })
def test_drafts_for_user_omits_registered(self):
    """drafts_for_user excludes drafts that already became registrations."""
    prereg_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    d1, d2, d3 = [
        factories.DraftRegistrationFactory(
            initiator=self.user, registration_schema=prereg_schema)
        for _ in range(3)
    ]
    # Mark d1 as already registered; it must drop out of the results.
    d1.registered_node = factories.RegistrationFactory()
    d1.save()
    for draft in drafts_for_user(self.user, 'prereg'):
        assert_in(draft._id, (d2._id, d3._id))
        assert_not_equal(draft._id, d1._id)
def setUp(self):
    """Create a prereg admin, a plain user, and a prereg draft branched
    from a parent project the user does not own."""
    super(TestCheckPreregAuth, self).setUp()
    ensure_schemas()
    admin = AuthUserFactory()
    admin.system_tags.append(settings.PREREG_ADMIN_TAG)
    admin.save()
    self.prereg_challenge_admin_user = admin
    prereg_schema = MetaSchema.find_one(
        Q("name", "eq", "Prereg Challenge") &
        Q("schema_version", "eq", 2)
    )
    self.user = AuthUserFactory()
    self.node = factories.ProjectFactory(creator=self.user)
    self.parent = factories.ProjectFactory()
    self.child = factories.NodeFactory(parent=self.parent)
    self.draft_registration = factories.DraftRegistrationFactory(
        initiator=self.user,
        registration_schema=prereg_schema,
        branched_from=self.parent,
    )
def get_prereg_drafts(user=None, filters=tuple()):
    """Prereg Challenge drafts with a non-null approval, oldest approval
    initiation first."""
    prereg_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    query = (
        Q('registration_schema', 'eq', prereg_schema) &
        Q('approval', 'ne', None)
    )
    if user:
        # TODO: filter by assignee; this requires multiple levels of Prereg admins-
        # one level that can see all drafts, and another than can see only the ones they're assigned.
        # As a followup to this, we need to make sure this applies to approval/rejection/commenting endpoints
        # query = query & Q('_metaschema_flags.assignee', 'eq', user._id)
        pass
    drafts = DraftRegistration.find(query)
    return sorted(drafts, key=operator.attrgetter('approval.initiation_date'))
def test_archive_success_same_file_in_component(self):
    """A file selected on a child component must have its viewUrl rewritten
    to the CHILD's registration, not the parent's.

    NOTE(review): ``child_file_tree`` is built but never used — the mocked
    ``_get_file_tree`` returns ``file_tree`` for every node; confirm it is
    intentionally dead.
    """
    ensure_schemas()
    file_tree = file_tree_factory(3, 3, 3)
    selected = select_files_from_tree(file_tree).values()[0]
    child_file_tree = file_tree_factory(0, 0, 0)
    child_file_tree['children'] = [selected]
    node = factories.NodeFactory(creator=self.user)
    child = factories.NodeFactory(creator=self.user, parent=node)
    prereg_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2))
    # The answer references the file by sha256 and by the CHILD node id.
    data = {
        ('q_' + selected['name']): {
            'value': fake.word(),
            'extra': {
                'sha256': selected['extra']['hashes']['sha256'],
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    child._id, selected['path']),
                'selectedFileName': selected['name'],
                'nodeId': child._id
            }
        }
    }
    with test_utils.mock_archive(node, schema=prereg_schema, data=data, autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(StorageAddonBase, '_get_file_tree', mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory()
            archive_success(registration._id, job._id)
            child_reg = registration.nodes[0]
            # Rewritten viewUrl must point at the child registration.
            for key, question in registration.registered_meta[
                    prereg_schema._id].items():
                assert_in(child_reg._id, question['extra']['viewUrl'])
def _create(cls, *args, **kwargs):
    """Factory hook: build a DraftRegistration, creating a backing project
    and seeding the schemas when none are supplied.

    :param branched_from: optional node to branch the draft from
    :param initiator: optional user; defaults to the project creator
    :param registration_schema: optional MetaSchema; defaults to the first
    :param registration_metadata: optional answers dict; defaults to {}
    """
    branched_from = kwargs.get("branched_from")
    initiator = kwargs.get("initiator")
    registration_schema = kwargs.get("registration_schema")
    registration_metadata = kwargs.get("registration_metadata")
    if not branched_from:
        project_params = {}
        if initiator:
            project_params["creator"] = initiator
        branched_from = ProjectFactory(**project_params)
        initiator = branched_from.creator
    try:
        registration_schema = registration_schema or MetaSchema.find()[0]
    except IndexError:
        # BUG FIX: after seeding we must actually fetch a schema —
        # previously registration_schema was left as None on this path.
        ensure_schemas()
        registration_schema = registration_schema or MetaSchema.find()[0]
    registration_metadata = registration_metadata or {}
    draft = DraftRegistration.create_from_node(
        branched_from,
        user=initiator,
        schema=registration_schema,
        data=registration_metadata
    )
    return draft
def setUp(self):
    """Reset and reseed the schemas, create one registration per legacy
    (version 1) schema plus a multi-schema registration, then null every
    node's ``registered_schema`` so the migration must repopulate it."""
    super(TestMigrateSchemas, self).setUp()
    # Start from a clean, freshly seeded schema collection.
    MetaSchema.remove()
    ensure_schemas()
    self.regular_old_node = factories.NodeFactory()
    self.open_ended_schema = MetaSchema.find_one(
        Q('name', 'eq', SCHEMA_NAMES[0]) &
        Q('schema_version', 'eq', 1)
    )
    self.open_ended = self._make_registration(self.open_ended_schema)
    self.standard_schema = MetaSchema.find_one(
        Q('name', 'eq', SCHEMA_NAMES[1]) &
        Q('schema_version', 'eq', 1)
    )
    self.standard = self._make_registration(self.standard_schema)
    self.brandt_pre_schema = MetaSchema.find_one(
        Q('name', 'eq', SCHEMA_NAMES[2]) &
        Q('schema_version', 'eq', 1)
    )
    self.brandt_pre = self._make_registration(self.brandt_pre_schema)
    self.brandt_post_schema = MetaSchema.find_one(
        Q('name', 'eq', SCHEMA_NAMES[3]) &
        Q('schema_version', 'eq', 1)
    )
    self.brandt_post = self._make_registration(self.brandt_post_schema)
    # One registration that used two schemas at once.
    self.multiple = self._make_registration([
        self.brandt_pre_schema,
        self.brandt_post_schema
    ])
    # Looked up by name only — no schema_version filter here.
    self.confirmatory_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Confirmatory - General')
    )
    self.confirmatory = self._make_registration(self.confirmatory_schema)
    # Simulate pre-migration state: wipe the denormalized schema pointers.
    self.db['node'].update({}, {'$set': {'registered_schema': None}}, multi=True)
def setUp(self):
    """Seed schemas; create an admin carrying the prereg tag, a normal
    user, and a draft branched from a parent the user does not own."""
    super(TestCheckPreregAuth, self).setUp()
    ensure_schemas()
    self.prereg_challenge_admin_user = AuthUserFactory()
    self.prereg_challenge_admin_user.system_tags.append(
        settings.PREREG_ADMIN_TAG)
    self.prereg_challenge_admin_user.save()
    schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    self.user = AuthUserFactory()
    self.node = factories.ProjectFactory(creator=self.user)
    self.parent = factories.ProjectFactory()
    self.child = factories.NodeFactory(parent=self.parent)
    self.draft_registration = factories.DraftRegistrationFactory(
        initiator=self.user,
        registration_schema=schema,
        branched_from=self.parent,
    )
def _create(cls, *args, **kwargs):
    """Factory hook: build a DraftRegistration, creating a backing project
    and seeding the schema collection when none are supplied."""
    branched_from = kwargs.get('branched_from')
    initiator = kwargs.get('initiator')
    registration_schema = kwargs.get('registration_schema')
    registration_metadata = kwargs.get('registration_metadata')
    if not branched_from:
        project_params = {}
        if initiator:
            project_params['creator'] = initiator
        branched_from = ProjectFactory(**project_params)
        initiator = branched_from.creator
    try:
        registration_schema = registration_schema or MetaSchema.find()[0]
    except IndexError:
        # BUG FIX: seed the schemas AND fetch one — the original left
        # registration_schema as None after calling ensure_schemas().
        ensure_schemas()
        registration_schema = registration_schema or MetaSchema.find()[0]
    registration_metadata = registration_metadata or {}
    draft = DraftRegistration.create_from_node(
        branched_from,
        user=initiator,
        schema=registration_schema,
        data=registration_metadata,
    )
    return draft
def test_archive_success_same_file_in_component(self):
    """archive_success must rewrite a child-component file answer's viewUrl
    onto the child's registration rather than the parent's.

    NOTE(review): ``child_file_tree`` is constructed but unused — the mock
    below returns ``file_tree`` for all nodes; confirm it is intentional.
    """
    ensure_schemas()
    file_tree = file_tree_factory(3, 3, 3)
    selected = select_files_from_tree(file_tree).values()[0]
    child_file_tree = file_tree_factory(0, 0, 0)
    child_file_tree['children'] = [selected]
    node = factories.NodeFactory(creator=self.user)
    child = factories.NodeFactory(creator=self.user, parent=node)
    prereg_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    # File answer keyed by sha256, referencing the CHILD node.
    data = {
        ('q_' + selected['name']): {
            'value': fake.word(),
            'extra': {
                'sha256': selected['extra']['hashes']['sha256'],
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    child._id, selected['path']
                ),
                'selectedFileName': selected['name'],
                'nodeId': child._id
            }
        }
    }
    with test_utils.mock_archive(node, schema=prereg_schema, data=data, autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(StorageAddonBase, '_get_file_tree', mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory()
            archive_success(registration._id, job._id)
            child_reg = registration.nodes[0]
            # Every migrated answer must now point at the child registration.
            for key, question in registration.registered_meta[prereg_schema._id].items():
                assert_in(child_reg._id, question['extra']['viewUrl'])
def test_drafts_for_user_omits_registered(self):
    """Registered drafts are filtered out of drafts_for_user results."""
    schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    drafts = []
    for _ in range(3):
        drafts.append(factories.DraftRegistrationFactory(
            initiator=self.user,
            registration_schema=schema,
        ))
    registered, live_a, live_b = drafts
    # Attach a registered node to the first draft; it must be excluded.
    registered.registered_node = factories.ProjectFactory()
    registered.save()
    surviving_ids = (live_a._id, live_b._id)
    for draft in drafts_for_user(self.user, 'prereg'):
        assert_in(draft._id, surviving_ids)
        assert_not_equal(draft._id, registered._id)
def test_metaschema_is_fine_with_same_name_but_different_version(self):
    """Same name with different schema_version must not violate uniqueness."""
    MetaSchema(name='foo', schema_version=1).save()
    MetaSchema(name='foo', schema_version=2).save()
    # BUG FIX: modular-odm's ``find`` takes a Q query object, not keyword
    # filters — ``find(name='foo')`` did not filter by name at all. This
    # also matches the Q-based queries used throughout the rest of the file.
    assert_equal(MetaSchema.find(Q('name', 'eq', 'foo')).count(), 2)
def archive_success(dst_pk, job_pk):
    """Archiver's final callback. For the time being the use case for this
    task is to rewrite references to files selected in a registration schema
    (the Prereg Challenge being the first to expose this feature). The
    created references point to files on the registered_from Node (needed for
    previewing schema data), and must be re-associated with the corresponding
    files in the newly created registration.

    :param str dst_pk: primary key of registration Node
    :param str job_pk: primary key of the ArchiveJob to mark as sent

    note:: At first glance this task makes redundant calls to
    utils.get_file_map (which returns a generator yielding
    (<sha256>, <file_metadata>) pairs) on the dst Node. Two notes about
    utils.get_file_map: 1) this function memoizes previous results to reduce
    overhead and 2) this function returns a generator that lazily fetches the
    file metadata of child Nodes (it is possible for a selected file to
    belong to a child Node) using a non-recursive DFS. Combined this allows
    for a relatively effient implementation with seemingly redundant calls.
    """
    create_app_context()
    dst = Node.load(dst_pk)
    # The filePicker extension addded with the Prereg Challenge registration schema
    # allows users to select files in OSFStorage as their response to some schema
    # questions. These files are references to files on the unregistered Node, and
    # consequently we must migrate those file paths after archiver has run. Using
    # sha256 hashes is a convenient way to identify files post-archival.
    prereg_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    missing_files = []
    if prereg_schema in dst.registered_schema:
        prereg_metadata = dst.registered_meta[prereg_schema._id]
        updated_metadata = {}
        for key, question in prereg_metadata.items():
            if isinstance(question['value'], dict):
                # Object-type question: file answers live one level down.
                for subkey, subvalue in question['value'].items():
                    registration_file = None
                    if subvalue.get('extra', {}).get('sha256'):
                        registration_file, node_id = find_registration_file(subvalue, dst)
                        if not registration_file:
                            missing_files.append({
                                'file_name': subvalue['extra']['selectedFileName'],
                                'question_title': find_question(prereg_schema.schema, key)['title']
                            })
                            continue
                        # Point the answer at the archived copy of the file.
                        subvalue['extra'].update({
                            'viewUrl': VIEW_FILE_URL_TEMPLATE.format(node_id=node_id, path=registration_file['path'].lstrip('/'))
                        })
                    question['value'][subkey] = subvalue
            else:
                # Simple question whose answer is a single (possibly file) value.
                if question.get('extra', {}).get('sha256'):
                    registration_file, node_id = find_registration_file(question, dst)
                    if not registration_file:
                        missing_files.append({
                            'file_name': question['extra']['selectedFileName'],
                            'question_title': find_question(prereg_schema.schema, key)['title']
                        })
                        continue
                    question['extra'].update({
                        'viewUrl': VIEW_FILE_URL_TEMPLATE.format(node_id=node_id, path=registration_file['path'].lstrip('/'))
                    })
            updated_metadata[key] = question
        # Fail loudly if any selected file could not be located post-archive.
        if missing_files:
            raise ArchivedFileNotFound(
                registration=dst,
                missing_files=missing_files
            )
        prereg_metadata.update(updated_metadata)
        dst.registered_meta[prereg_schema._id] = prereg_metadata
        dst.save()
    job = ArchiveJob.load(job_pk)
    if not job.sent:
        job.sent = True
        job.save()
    # Kick off the sanction/approval flow now that archival is complete.
    dst.sanction.ask(dst.get_active_contributors_recursive(unique_users=True))
def generate_schema_from_data(data):
    """Create (or update) a 'Test' MetaSchema whose question structure
    mirrors *data* (a dict of qid -> question dict)."""

    def from_property(id, prop):
        # Dict values become nested object properties; 'extra' marks uploads.
        if isinstance(prop.get('value'), dict):
            return {
                'id': id,
                'type': 'object',
                'properties': [
                    from_property(pid, sp)
                    for pid, sp in prop['value'].items()
                ]
            }
        else:
            return {
                'id': id,
                'type': 'osf-upload' if prop.get('extra') else 'string'
            }

    def from_question(qid, question):
        # BUG FIX: previously read the enclosing list-comprehension variable
        # `q` (Python 2 scope leakage) instead of the `question` parameter.
        if question.get('extra'):
            return {
                'qid': qid,
                'type': 'osf-upload'
            }
        elif isinstance(question.get('value'), dict):
            return {
                'qid': qid,
                'type': 'object',
                'properties': [
                    from_property(id, value)
                    for id, value in question.get('value').items()
                ]
            }
        else:
            return {
                'qid': qid,
                'type': 'string'
            }

    _schema = {
        'name': 'Test',
        'version': 2,
        'config': {
            'hasFiles': True
        },
        'pages': [{
            'id': 'page1',
            'questions': [
                from_question(qid, q)
                for qid, q in data.items()
            ]
        }]
    }
    schema = MetaSchema(
        name=_schema['name'],
        schema_version=_schema['version'],
        schema=_schema
    )
    try:
        schema.save()
    except KeyExistsException:
        # Unfortunately, we don't have db isolation between test cases for some
        # reason. Update the doc currently in the db rather than saving a new
        # one.
        schema = MetaSchema.find_one(
            Q('name', 'eq', _schema['name']) &
            Q('schema_version', 'eq', _schema['version'])
        )
        schema.schema = _schema
        schema.save()
    return schema
def get_queryset(self):
    """Return submitted (non-null approval) Prereg Challenge drafts."""
    schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    submitted = DraftRegistration.find(
        Q('registration_schema', 'eq', schema) &
        Q('approval', 'ne', None)
    )
    return submitted.sort(self.ordering)
def setUp(self):
    """Prepare a user and the detail URL of the newest Prereg Challenge
    schema."""
    super(TestMetaSchemaDetail, self).setUp()
    self.user = AuthUserFactory()
    ensure_schemas()
    self.schema = MetaSchema.find_one(
        Q('schema_version', 'eq', LATEST_SCHEMA_VERSION) &
        Q('name', 'eq', 'Prereg Challenge')
    )
    self.url = '/{}metaschemas/{}/'.format(API_BASE, self.schema._id)
def check_access(node, auth, action, cas_resp):
    """Verify that user can perform requested action on resource.

    Raise appropriate error code if action cannot proceed.

    :param node: target node
    :param auth: current user's auth context
    :param str action: waterbutler action name (looked up in permission_map)
    :param cas_resp: CAS response when the request carries an OAuth token
    :raises HTTPError: BAD_REQUEST for unknown actions; FORBIDDEN or
        UNAUTHORIZED when access is denied
    """
    permission = permission_map.get(action, None)
    if permission is None:
        # Unknown action: client error rather than an auth failure.
        raise HTTPError(httplib.BAD_REQUEST)

    if cas_resp:
        if permission == 'read':
            if node.is_public:
                return True
            required_scope = oauth_scopes.CoreScopes.NODE_FILE_READ
        else:
            required_scope = oauth_scopes.CoreScopes.NODE_FILE_WRITE
        # Token must be authenticated and carry the required file scope.
        if not cas_resp.authenticated \
           or required_scope not in oauth_scopes.normalize_scopes(cas_resp.attributes['accessTokenScope']):
            raise HTTPError(httplib.FORBIDDEN)

    if permission == 'read' and node.can_view(auth):
        return True
    if permission == 'write' and node.can_edit(auth):
        return True

    # Users attempting to register projects with components might not have
    # `write` permissions for all components. This will result in a 403 for
    # all `copyto` actions as well as `copyfrom` actions if the component
    # in question is not public. To get around this, we have to recursively
    # check the node's parent node to determine if they have `write`
    # permissions up the stack.
    # TODO(hrybacki): is there a way to tell if this is for a registration?
    # All nodes being registered that receive the `copyto` action will have
    # `node.is_registration` == True. However, we have no way of telling if
    # `copyfrom` actions are originating from a node being registered.
    # TODO This is raise UNAUTHORIZED for registrations that have not been archived yet
    if action == 'copyfrom' or (action == 'copyto' and node.is_registration):
        parent = node.parent_node
        while parent:
            if parent.can_edit(auth):
                return True
            parent = parent.parent_node

    # Users with the PREREG_ADMIN_TAG should be allowed to download files
    # from prereg challenge draft registrations.
    try:
        prereg_schema = MetaSchema.find_one(
            Q('name', 'eq', 'Prereg Challenge') &
            Q('schema_version', 'eq', 2)
        )
        allowed_nodes = [node] + node.parents
        prereg_draft_registration = DraftRegistration.find(
            Q('branched_from', 'in', [n._id for n in allowed_nodes]) &
            Q('registration_schema', 'eq', prereg_schema)
        )
        if action == 'download' and \
           auth.user is not None and \
           prereg_draft_registration.count() > 0 and \
           settings.PREREG_ADMIN_TAG in auth.user.system_tags:
            return True
    except NoResultsFound:
        pass

    # Deny: 403 for an authenticated user, 401 to prompt login otherwise.
    raise HTTPError(httplib.FORBIDDEN if auth.user else httplib.UNAUTHORIZED)
def get_schema():
    """Return every meta-schema, serialized for the client."""
    return {
        'schemas': [
            utils.serialize_meta_schema(schema)
            for schema in MetaSchema.find()
        ]
    }
def archive_success(dst_pk, job_pk):
    """Archiver's final callback. For the time being the use case for this
    task is to rewrite references to files selected in a registration schema
    (the Prereg Challenge being the first to expose this feature). The
    created references point to files on the registered_from Node (needed for
    previewing schema data), and must be re-associated with the corresponding
    files in the newly created registration.

    :param str dst_pk: primary key of registration Node
    :param str job_pk: primary key of the ArchiveJob to mark as sent

    note:: At first glance this task makes redundant calls to
    utils.get_file_map (which returns a generator yielding
    (<sha256>, <file_metadata>) pairs) on the dst Node. Two notes about
    utils.get_file_map: 1) this function memoizes previous results to reduce
    overhead and 2) this function returns a generator that lazily fetches the
    file metadata of child Nodes (it is possible for a selected file to
    belong to a child Node) using a non-recursive DFS. Combined this allows
    for a relatively effient implementation with seemingly redundant calls.
    """
    create_app_context()
    dst = Node.load(dst_pk)
    # The filePicker extension addded with the Prereg Challenge registration schema
    # allows users to select files in OSFStorage as their response to some schema
    # questions. These files are references to files on the unregistered Node, and
    # consequently we must migrate those file paths after archiver has run. Using
    # sha256 hashes is a convenient way to identify files post-archival.
    prereg_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2))
    missing_files = []
    if prereg_schema in dst.registered_schema:
        prereg_metadata = dst.registered_meta[prereg_schema._id]
        updated_metadata = {}
        for key, question in prereg_metadata.items():
            if isinstance(question['value'], dict):
                # Object-type question: file answers are nested one level down.
                for subkey, subvalue in question['value'].items():
                    registration_file = None
                    if subvalue.get('extra', {}).get('sha256'):
                        registration_file, node_id = find_registration_file(
                            subvalue, dst)
                        if not registration_file:
                            missing_files.append({
                                'file_name': subvalue['extra']['selectedFileName'],
                                'question_title': find_question(prereg_schema.schema, key)['title']
                            })
                            continue
                        # Re-point the answer at the archived copy.
                        subvalue['extra'].update({
                            'viewUrl': VIEW_FILE_URL_TEMPLATE.format(
                                node_id=node_id,
                                path=registration_file['path'].lstrip('/'))
                        })
                    question['value'][subkey] = subvalue
            else:
                # Simple question: a single (possibly file) answer value.
                if question.get('extra', {}).get('sha256'):
                    registration_file, node_id = find_registration_file(
                        question, dst)
                    if not registration_file:
                        missing_files.append({
                            'file_name': question['extra']['selectedFileName'],
                            'question_title': find_question(prereg_schema.schema, key)['title']
                        })
                        continue
                    question['extra'].update({
                        'viewUrl': VIEW_FILE_URL_TEMPLATE.format(
                            node_id=node_id,
                            path=registration_file['path'].lstrip('/'))
                    })
            updated_metadata[key] = question
        # Fail loudly if any selected file could not be located post-archive.
        if missing_files:
            raise ArchivedFileNotFound(registration=dst,
                                       missing_files=missing_files)
        prereg_metadata.update(updated_metadata)
        dst.registered_meta[prereg_schema._id] = prereg_metadata
        dst.save()
    job = ArchiveJob.load(job_pk)
    if not job.sent:
        job.sent = True
        job.save()
    # Kick off the sanction/approval flow now that archival is complete.
    dst.sanction.ask(
        dst.get_active_contributors_recursive(unique_users=True))
def draft_reg_util():
    """Clear existing drafts, seed the schemas, and fetch Prereg Challenge
    version 2."""
    DraftRegistration.remove()
    ensure_schemas()
    return MetaSchema.find_one(
        Q('schema_version', 'eq', 2) &
        Q('name', 'eq', 'Prereg Challenge')
    )
def test_archive_success(self):
    """archive_success should rewrite every selected file's viewUrl to point
    at the registration instead of the source node.

    Fix: the flat-question branch previously checked for a 'hashes' key in
    question['extra'], but the fixture data (and archive_success itself)
    use 'sha256' — so top-level file questions were silently skipped by the
    assertions. The check now uses 'sha256', and the bookkeeping delete is
    guarded (each sha256 appears in both a flat and an object question, so
    an unguarded delete would KeyError on the second occurrence).
    """
    ensure_schemas()
    file_tree = file_tree_factory(3, 3, 3)
    selected_files = select_files_from_tree(file_tree)
    node = factories.NodeFactory(creator=self.user)
    prereg_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    # Flat questions: one file answer directly on the question's 'extra'.
    data = {
        ('q_' + selected_file['name']): {
            'value': fake.word(),
            'extra': {
                'selectedFileName': selected_file['name'],
                'nodeId': node._id,
                'sha256': sha256,
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(node._id, selected_file['path'])
            }
        }
        for sha256, selected_file in selected_files.items()
    }
    # Object-type questions: the file answer is nested one level down in
    # question['value']; a second, fileless sub-answer exercises the
    # non-file path.
    object_types = {
        ('q_' + selected_file['name'] + '_obj'): {
            'value': {
                name_factory(): {
                    'value': fake.word(),
                    'extra': {
                        'selectedFileName': selected_file['name'],
                        'nodeId': node._id,
                        'sha256': sha256,
                        'viewUrl': '/project/{0}/files/osfstorage{1}'.format(node._id, selected_file['path'])
                    }
                },
                name_factory(): {
                    'value': fake.word()
                }
            }
        }
        for sha256, selected_file in selected_files.items()
    }
    data.update(copy.deepcopy(object_types))
    # Plain (non-file) questions must pass through unmodified.
    other_questions = {
        'q{}'.format(i): {
            'value': fake.word()
        }
        for i in range(5)
    }
    data.update(other_questions)
    with test_utils.mock_archive(node, schema=prereg_schema, data=data, autocomplete=True, autoapprove=True) as registration:
        with mock.patch.object(StorageAddonBase, '_get_file_tree', mock.Mock(return_value=file_tree)):
            job = factories.ArchiveJobFactory()
            archive_success(registration._id, job._id)
            for key, question in registration.registered_meta[prereg_schema._id].items():
                target = None
                if isinstance(question['value'], dict):
                    target = [v for v in question['value'].values() if 'extra' in v and 'sha256' in v['extra']][0]
                elif 'extra' in question and 'sha256' in question['extra']:
                    target = question
                if target:
                    # viewUrl must now reference the registration, not the
                    # original node.
                    assert_in(registration._id, target['extra']['viewUrl'])
                    assert_not_in(node._id, target['extra']['viewUrl'])
                    # Each sha256 is shared by a flat and an object question;
                    # only delete it the first time we see it.
                    if target['extra']['sha256'] in selected_files:
                        del selected_files[target['extra']['sha256']]
                else:
                    # check non-file questions are unmodified
                    assert_equal(data[key]['value'], question['value'])
    # Every selected file must have been verified above.
    assert_false(selected_files)
def test_inactive_metaschema_not_returned(self): self.schema = MetaSchema.find_one(Q('name', 'eq', 'Open-Ended Registration') & Q('schema_version', 'eq', 1)) self.url = '/{}metaschemas/{}/'.format(API_BASE, self.schema._id) res = self.app.get(self.url, auth=self.user.auth, expect_errors=True) assert_equal(res.status_code, 404)
def test_metaschema_uniqueness_is_enforced_in_the_database(self): # Using MongoDB's uniqueness instead of modular-odm's allows us to # kludge a race-less upsert in ensure_schema. MetaSchema(name='foo', schema_version=1).save() assert_raises(KeyExistsException, MetaSchema(name='foo', schema_version=1).save)
def test_archive_success_with_components(self):
    """archive_success should rewrite selected-file viewUrls even when the
    selected files live on child components of the registered node.
    """
    ensure_schemas()
    # Build a small node tree: root -> comp1 -> grandchild, plus a sibling
    # component under root.
    node = factories.NodeFactory(creator=self.user)
    comp1 = factories.NodeFactory(parent=node, creator=self.user)
    factories.NodeFactory(parent=comp1, creator=self.user)
    factories.NodeFactory(parent=node, creator=self.user)
    nodes = [n for n in node.node_and_primary_descendants()]
    # One fake OSFStorage file tree per node in the hierarchy.
    file_trees = {
        n._id: file_tree_factory(3, 3, 3)
        for n in nodes
    }
    selected_files = {}
    # Maps each selected file's sha256 to the node it was selected from.
    selected_file_node_index = {}
    for n in nodes:
        file_tree = file_trees[n._id]
        selected = select_files_from_tree(file_tree)
        selected_file_node_index.update({
            sha256: n._id
            for sha256 in selected.keys()
        })
        selected_files.update(selected)  # select files from each Node
    prereg_schema = MetaSchema.find_one(
        Q('name', 'eq', 'Prereg Challenge') &
        Q('schema_version', 'eq', 2)
    )
    # Flat questions: one file answer directly on the question's 'extra',
    # pointing at whichever node the file was selected from.
    data = {
        ('q_' + selected_file['name']): {
            'value': fake.word(),
            'extra': {
                'sha256': sha256,
                'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                    selected_file_node_index[sha256],
                    selected_file['path']
                ),
                'selectedFileName': selected_file['name'],
                'nodeId': selected_file_node_index[sha256]
            }
        }
        for sha256, selected_file in selected_files.items()
    }
    # Object-type questions: the file answer is nested inside
    # question['value']; a second, fileless sub-answer exercises the
    # non-file path.
    object_types = {
        ('q_' + selected_file['name'] + '_obj'): {
            'value': {
                name_factory(): {
                    'value': fake.word(),
                    'extra': {
                        'sha256': sha256,
                        'viewUrl': '/project/{0}/files/osfstorage{1}'.format(
                            selected_file_node_index[sha256],
                            selected_file['path']
                        ),
                        'selectedFileName': selected_file['name'],
                        'nodeId': selected_file_node_index[sha256]
                    }
                },
                name_factory(): {
                    'value': fake.word()
                }
            }
        }
        for sha256, selected_file in selected_files.items()
    }
    data.update(object_types)
    # Plain (non-file) questions must pass through unmodified.
    other_questions = {
        'q{}'.format(i): {
            'value': fake.word()
        }
        for i in range(5)
    }
    data.update(other_questions)
    with test_utils.mock_archive(node, schema=prereg_schema, data=copy.deepcopy(data), autocomplete=True, autoapprove=True) as registration:
        # Patch each registered node's OSFStorage addon to serve the file
        # tree of the node it was registered from.
        patches = []
        for n in registration.node_and_primary_descendants():
            file_tree = file_trees[n.registered_from._id]
            osfstorage = n.get_addon('osfstorage')
            patch = mock.patch.object(
                osfstorage,
                '_get_file_tree',
                mock.Mock(return_value=file_tree)
            )
            patch.start()
            patches.append(patch)
        job = factories.ArchiveJobFactory()
        archive_success(registration._id, job._id)
        for key, question in registration.registered_meta[prereg_schema._id].items():
            target = None
            if isinstance(question['value'], dict):
                target = [v for v in question['value'].values() if 'extra' in v and 'sha256' in v['extra']][0]
            elif 'extra' in question and 'sha256' in question['extra']:
                target = question
            if target:
                # The rewritten viewUrl must reference a node within the
                # registration tree (5-char OSF guid in the path).
                node_id = re.search(
                    r'^/project/(?P<node_id>\w{5}).+$',
                    target['extra']['viewUrl']
                ).groupdict()['node_id']
                assert_in(
                    node_id,
                    [r._id for r in registration.node_and_primary_descendants()]
                )
                # Each sha256 appears in both a flat and an object question;
                # only remove it from the bookkeeping dict once.
                if target['extra']['sha256'] in selected_files:
                    del selected_files[target['extra']['sha256']]
            else:
                # check non-file questions are unmodified
                assert_equal(data[key]['value'], question['value'])
        # ensure each selected file was checked
        assert_false(selected_files)
        # NOTE(review): patches are only stopped on the success path; a
        # failed assertion above would leak the patched addons.
        for patch in patches:
            patch.stop()