def get(self, **kwargs):
    """Get tree json.

    Dispatch on request arguments: a ``pid_value`` yields the contribute
    tree (optionally scoped to a community's root node), a ``browsing``
    action yields the browsing tree, otherwise the full index tree.

    :param kwargs: May contain ``pid_value`` identifying a record.
    :returns: JSON response with the tree data (HTTP 200).
    :raises InvalidDataRESTError: On any unexpected failure.
    """
    try:
        action = request.values.get('action')
        comm_id = request.values.get('community')
        pid = kwargs.get('pid_value')
        if pid:
            if comm_id:
                comm = Community.get(comm_id)
                tree = self.record_class.get_contribute_tree(
                    pid, int(comm.root_node_id))
            else:
                tree = self.record_class.get_contribute_tree(pid)
        elif action and 'browsing' in action and comm_id is None:
            tree = self.record_class.get_browsing_tree()
        elif action and 'browsing' in action and comm_id is not None:
            comm = Community.get(comm_id)
            if comm is not None:
                tree = self.record_class.get_browsing_tree(
                    int(comm.root_node_id))
        else:
            tree = self.record_class.get_index_tree()
        return make_response(jsonify(tree), 200)
    except Exception as ex:
        # BUG FIX: the original passed ``ex`` as a lazy %-format argument
        # with no placeholder in the message, so the exception detail was
        # silently dropped from the log.
        current_app.logger.error('IndexTree Action Exception: %s', ex)
        raise InvalidDataRESTError()
def test_basic_community_workflow(app, db, communities, deposit, deposit_file):
    """Test simple (without concurrent events) deposit publishing workflow."""
    deposit = _publish_and_expunge(db, deposit)
    assert InclusionRequest.query.count() == 0
    pid, record = deposit.fetch_published()
    assert not record.get('communities', [])

    # Open record for edit, request a community and publish.
    deposit = deposit.edit()
    deposit['communities'] = ['c1']
    deposit = _publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()

    # Should contain just an InclusionRequest.
    assert not record.get('communities', [])
    assert InclusionRequest.query.count() == 1
    inclusion_req = InclusionRequest.query.one()
    assert inclusion_req.id_community == 'c1'
    assert inclusion_req.id_record == record.id

    # Accept the record into community 'c1'.
    c1 = Community.get('c1')
    c1.accept_record(record)
    record.commit()
    db.session.commit()
    assert InclusionRequest.query.count() == 0
    assert record['communities'] == ['c1']

    # Open for edit and request another community.
    deposit = deposit.edit()
    assert deposit['communities'] == ['c1']
    deposit['communities'] = ['c1', 'c2']  # New request for community 'c2'
    deposit = _publish_and_expunge(db, deposit)
    deposit['communities'] = ['c1', 'c2']
    pid, record = deposit.fetch_published()
    assert record['communities'] == ['c1']
    assert InclusionRequest.query.count() == 1
    inclusion_req = InclusionRequest.query.one()
    assert inclusion_req.id_community == 'c2'
    assert inclusion_req.id_record == record.id

    # Reject the request for community 'c2'.
    c2 = Community.get('c2')
    c2.reject_record(record)
    db.session.commit()
    deposit = deposit.edit()

    # The deposit should not contain obsolete inclusion requests.
    assert deposit['communities'] == ['c1']
    assert InclusionRequest.query.count() == 0
    pid, record = deposit.fetch_published()
    assert record['communities'] == ['c1']

    # Request for removal from a previously accepted community 'c1'.
    deposit['communities'] = []
    deposit = _publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert not deposit.get('communities', [])
    assert not record.get('communities', [])
    assert InclusionRequest.query.count() == 0
def community_curation(record, user):
    """Generate a list of pending and accepted communities with permissions.

    Return a 4-tuple of lists (in order):

    * 'pending' communities, which can be curated by given user
    * 'accepted' communities, which can be curated by given user
    * All 'pending' communities
    * All 'accepted' communities
    """
    irs = InclusionRequest.query.filter_by(id_record=record.id).order_by(
        InclusionRequest.id_community).all()
    pending = [ir.community for ir in irs]
    accepted = [Community.get(c) for c in record.get('communities', [])]
    # Additionally filter out community IDs that did not resolve (None)
    accepted = [c for c in accepted if c]

    # Check for global curation permission (all communities on this record).
    global_perm = None
    if user.is_anonymous:
        global_perm = False
    elif DynamicPermission(ActionNeed('admin-access')).can():
        global_perm = True

    if global_perm:
        # BUG FIX: previously returned (pending, pending, accepted,
        # accepted), which disagreed with the else-branch tuple layout
        # (curatable-pending, curatable-accepted, all-pending, all-accepted).
        return (pending, accepted, pending, accepted)
    else:
        return (
            [c for c in pending if _can_curate(c, user, record)],
            [c for c in accepted
             if _can_curate(c, user, record, accepted=True)],
            pending,
            accepted,
        )
def community_curation(record, user):
    """Generate a list of pending and accepted communities with permissions.

    Return a 4-tuple of lists (in order):

    * 'pending' communities, which can be curated by given user
    * 'accepted' communities, which can be curated by given user
    * All 'pending' communities
    * All 'accepted' communities
    """
    pending = list({ir.community for ir in ZenodoCommunity.get_irs(record).all()})
    resolved = (Community.get(c) for c in record.get('communities', []))
    # Drop community IDs that did not resolve (None).
    accepted = [comm for comm in resolved if comm]

    # Determine global curation permission (covers every community here).
    if user.is_anonymous:
        global_perm = False
    elif DynamicPermission(ActionNeed('admin-access')).can():
        global_perm = True
    else:
        global_perm = None

    if global_perm:
        return (pending, accepted, pending, accepted)
    curatable_pending = [c for c in pending if _can_curate(c, user, record)]
    curatable_accepted = [
        c for c in accepted if _can_curate(c, user, record, accepted=True)
    ]
    return (curatable_pending, curatable_accepted, pending, accepted)
def _default_parser_community(community_id, qstr=None):
    """Default parser that uses the Q() from elasticsearch_dsl.

    Full text Search.
    Detail Search.

    :param qstr: Query string.
    :returns: Query parser.
    """
    # add Permission filter by publish date and status
    comm = Community.get(community_id)
    root_node_id = comm.root_node_id
    mt = get_permission_filter(root_node_id)

    # multi keywords search filter
    kmt = _get_detail_keywords_query()

    # detail search
    if kmt:
        mt.extend(kmt)
        # BUG FIX: the original referenced the undefined name 'qs'
        # (NameError at runtime); the parameter is 'qstr'.
        q = _get_search_qs_query(qstr)
        if q:
            mt.append(q)
    else:
        # Full Text Search
        if qstr:
            q_s = _get_file_content_query(qstr)
            mt.append(q_s)
    return Q('bool', must=mt) if mt else Q()
def community_curation(record, user):
    """Generate a list of pending and accepted communities with permissions.

    Return a 4-tuple of lists (in order):

    * 'pending' communities, which can be curated by given user
    * 'accepted' communities, which can be curated by given user
    * All 'pending' communities
    * All 'accepted' communities
    """
    irs = InclusionRequest.query.filter_by(id_record=record.id).order_by(
        InclusionRequest.id_community).all()
    pending = [ir.community for ir in irs]
    accepted = [Community.get(c) for c in record.get('communities', [])]
    # Additionally filter out community IDs that did not resolve (None)
    accepted = [c for c in accepted if c]

    # Check for global curation permission (all communities on this record).
    global_perm = None
    if user.is_anonymous:
        global_perm = False
    elif DynamicPermission(ActionNeed('admin-access')).can():
        global_perm = True

    if global_perm:
        # BUG FIX: previously returned (pending, pending, accepted,
        # accepted), which disagreed with the else-branch tuple layout
        # (curatable-pending, curatable-accepted, all-pending, all-accepted).
        return (pending, accepted, pending, accepted)
    else:
        return (
            [c for c in pending if _can_curate(c, user, record)],
            [c for c in accepted
             if _can_curate(c, user, record, accepted=True)],
            pending,
            accepted,
        )
def test_fixed_communities_edit(app, db, users, communities, deposit,
                                deposit_file, communities_autoadd_enabled):
    """Test automatic adding and requesting to fixed communities."""
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['zenodo']
    assert 'communities' not in record
    inclusion_req = InclusionRequest.query.one()
    assert inclusion_req.id_community == 'zenodo'
    assert inclusion_req.id_record == record.id

    # Open for edit; the 'zenodo' community request must survive.
    deposit = deposit.edit()
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['zenodo']
    assert not record.get('communities', [])
    assert InclusionRequest.query.count() == 1

    zenodo_comm = Community.get('zenodo')
    zenodo_comm.accept_record(record)
    record.commit()
    db.session.commit()

    # Publish and make sure nothing is missing.
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['zenodo']
    assert record['communities'] == ['zenodo']
    assert record['_oai']['sets'] == ['user-zenodo']
    assert InclusionRequest.query.count() == 0
def legacy_index():
    """Legacy deposit.

    Redirect to the community upload page when a valid ``c`` argument is
    given, otherwise to the default new-deposit page.
    """
    c_id = request.args.get('c', type=str)
    if c_id:
        c = Community.get(c_id)
        # Robustness fix: Community.get returns None for an unknown ID;
        # previously this raised AttributeError (HTTP 500) on c.id.
        if c:
            return redirect('/communities/{0}/upload'.format(c.id))
    return redirect(url_for('invenio_deposit_ui.new'))
def test_accept_while_edit(app, db, communities, deposit, deposit_file):
    """Test deposit publishing with concurrent events.

    Accept a record, while deposit in open edit and then published.
    """
    deposit['communities'] = ['c1', 'c2']
    deposit = _publish_and_expunge(db, deposit)
    assert InclusionRequest.query.count() == 2
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', 'c2']
    assert not record.get('communities', [])

    # Open for edit.
    deposit = deposit.edit()
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', 'c2']
    assert not record.get('communities', [])
    assert InclusionRequest.query.count() == 2

    # Accept the record into 'c1' meanwhile.
    c1 = Community.get('c1')
    c1.accept_record(record)
    record.commit()
    db.session.commit()

    # Publish and make sure nothing is missing.
    deposit = _publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', 'c2']
    assert record['communities'] == ['c1']
    assert InclusionRequest.query.count() == 1
    inclusion_req = InclusionRequest.query.one()
    assert inclusion_req.id_community == 'c2'
    assert inclusion_req.id_record == record.id
def community_curation(record, user):
    """Generate a list of pending and accepted communities with permissions.

    Return a 4-tuple of lists (in order):

    * 'pending' communities, which can be curated by given user
    * 'accepted' communities, which can be curated by given user
    * All 'pending' communities
    * All 'accepted' communities
    """
    pending = list({ir.community for ir in ZenodoCommunity.get_irs(record).all()})
    resolved = (Community.get(c) for c in record.get('communities', []))
    # Drop community IDs that did not resolve (None).
    accepted = [comm for comm in resolved if comm]

    # Determine global curation permission (covers every community here).
    if user.is_anonymous:
        global_perm = False
    elif Permission(ActionNeed('admin-access')).can():
        global_perm = True
    else:
        global_perm = None

    if global_perm:
        return (pending, accepted, pending, accepted)
    curatable_pending = [c for c in pending if _can_curate(c, user, record)]
    curatable_accepted = [
        c for c in accepted if _can_curate(c, user, record, accepted=True)
    ]
    return (curatable_pending, curatable_accepted, pending, accepted)
def new():
    """Create a new deposit."""
    community = Community.get(request.args.get('c', type=str))
    return render_template(
        current_app.config['DEPOSIT_UI_NEW_TEMPLATE'],
        record={'_deposit': {'id': None}},
        community=community,
    )
def _filter_by_owned_communities(self, comms):
    """Filter the list of communities for auto accept.

    :param comms: Community IDs to be filtered by the deposit owners.
    :type comms: list of str
    :returns: Community IDs, which are owned by one of the deposit owners.
    :rtype: list
    """
    owners = self['_deposit']['owners']
    result = []
    for comm_id in comms:
        comm = Community.get(comm_id)
        # Robustness fix: Community.get returns None for an unresolvable
        # ID; the original raised AttributeError on such entries.
        if comm is not None and comm.id_user in owners:
            result.append(comm_id)
    return result
def validate_publish(self):
    """Validate deposit.

    Checks that files are present and that every requested community
    resolves to an existing one.
    """
    super(ZenodoDeposit, self).validate()
    if len(self.files) == 0:
        raise MissingFilesError()
    if 'communities' in self:
        unresolved = [
            c for c in self['communities'] if Community.get(c) is None
        ]
        if unresolved:
            raise MissingCommunityError(unresolved)
def _filter_by_owned_communities(self, comms):
    """Filter the list of communities for auto accept.

    :param comms: Community IDs to be filtered by the deposit owners.
    :type comms: list of str
    :returns: Community IDs, which are owned by one of the deposit owners.
    :rtype: list
    """
    owners = self['_deposit']['owners']
    result = []
    for comm_id in comms:
        comm = Community.get(comm_id)
        # Robustness fix: Community.get returns None for an unresolvable
        # ID; the original raised AttributeError on such entries.
        if comm is not None and comm.id_user in owners:
            result.append(comm_id)
    return result
def _autoadd_communities(comms, record):
    """Add record to all communities omitting the inclusion request.

    :param comms: Community IDs, to which the record should be added.
    :type comms: list of str
    :param record: Record corresponding to this deposit.
    :type record: `invenio_records.api.Record`
    """
    for comm_id in comms:
        comm = Community.get(comm_id)
        # Consistency fix: skip communities that already hold this record,
        # matching the guarded variant of this helper elsewhere in the
        # codebase and avoiding a duplicate-add error.
        if not comm.has_record(record):
            comm.add_record(record)  # Handles oai-sets internally
def test_remove_community_by_key_del(app, db, communities, deposit,
                                     deposit_file):
    """Test removal of communities by key deletion.

    Communities can be removed by not providing or deleting the communities
    from the key deposit. Moreover, the redundant 'empty' keys should not be
    automatically added to deposit nor record.
    """
    # If 'communities' key was not in deposit metadata,
    # it shouldn't be automatically added.
    assert 'communities' not in deposit
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert 'communities' not in deposit
    assert 'communities' not in record
    assert not record['_oai'].get('sets', [])

    # Request for 'c1' and 'c2'.
    deposit = deposit.edit()
    deposit['communities'] = ['c1', 'c2']
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    # No reason to have 'communities' in record since nothing was accepted.
    assert 'communities' not in record
    assert not record['_oai'].get('sets', [])

    # Accept 'c1'.
    c1 = Community.get('c1')
    c1.accept_record(record)
    record.commit()
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', 'c2']
    assert InclusionRequest.query.count() == 1
    assert record['communities'] == ['c1']
    assert set(record['_oai']['sets']) == set(['user-c1'])

    # Remove the key from deposit and publish.
    deposit = deposit.edit()
    del deposit['communities']
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert 'communities' not in deposit
    assert 'communities' not in record
    assert InclusionRequest.query.count() == 0
    assert not record['_oai'].get('sets', [])
def load_communities(self, data):
    """Load communities type."""
    if not isinstance(data, list):
        raise ValidationError(_('Not a list.'))
    identifiers = sorted(x['identifier'] for x in data if x.get('identifier'))
    unknown = {c for c in identifiers if not Community.get(c)}
    if unknown:
        raise ValidationError(
            'Invalid communities: {0}'.format(', '.join(unknown)),
            field_names='communities')
    return identifiers or missing
def __init__(self, community):
    """Construct the API object.

    :param community: Instantiate the API with the community. Parameter
        can be either the model instance, or string (community ID).
    :type community: invenio_communities.model.Community or str
    """
    if isinstance(community, (text_type, string_types)):
        # Resolve a community ID to the model instance.
        community = Community.get(community)
    self.community = community
def _autoadd_communities(comms, record):
    """Add the record to each community, omitting the inclusion request.

    :param comms: Community IDs, to which the record should be added.
    :type comms: list of str
    :param record: Record corresponding to this deposit.
    :type record: `invenio_records.api.Record`
    """
    for community_id in comms:
        community = Community.get(community_id)
        if not community.has_record(record):
            # Community.add_record handles the OAI sets internally.
            community.add_record(record)
def __init__(self, community):
    """Construct the API object.

    :param community: Instantiate the API with the community. Parameter
        can be either the model instance, or string (community ID).
    :type community: invenio_communities.model.Community or str
    """
    if isinstance(community, (text_type, string_types)):
        # Resolve a community ID to the model instance.
        community = Community.get(community)
    self.community = community
def migrate_record(record_uuid, logger=None):
    """Migrate a record.

    Transforms the legacy record to the new schema, files inclusion
    requests for its provisional communities, and registers/reserves its
    DOI.  On any unexpected error the transaction is rolled back and the
    record's ``recid`` PID is set back to RESERVED before re-raising.

    :param record_uuid: UUID of the record to migrate.
    :param logger: Optional logger used for progress/warning messages.
    """
    try:
        # Migrate record
        record = Record.get_record(record_uuid)
        if '$schema' in record:
            # Presence of '$schema' marks an already-migrated record.
            if logger:
                logger.info("Record already migrated.")
            return
        record = transform_record(record)
        provisional_communities = record.pop('provisional_communities', None)
        record.commit()
        # Create provisional communities.
        if provisional_communities:
            for c_id in provisional_communities:
                try:
                    c = Community.get(c_id)
                    if c:
                        InclusionRequest.create(c, record, notify=False)
                    else:
                        if logger:
                            logger.warning(
                                "Community {0} does not exists "
                                "(record {1}).".format(
                                    c_id, str(record.id)))
                except InclusionRequestExistsError:
                    # A pre-existing request is fine; just note it.
                    if logger:
                        logger.warning("Inclusion request exists.")
        # Register DOI
        doi = record.get('doi')
        if doi:
            # '10.5281' is the locally-minted DOI prefix; external DOIs
            # get no provider and are only reserved.
            is_internal = doi.startswith('10.5281')
            PersistentIdentifier.create(
                pid_type='doi',
                pid_value=doi,
                pid_provider='datacite' if is_internal else None,
                object_type='rec',
                object_uuid=record_uuid,
                status=(
                    PIDStatus.REGISTERED
                    if is_internal
                    else PIDStatus.RESERVED),
            )
        db.session.commit()
    except NoResultFound:
        # Record was deleted; nothing to migrate.
        if logger:
            logger.info("Deleted record - no migration required.")
    except Exception:
        # Roll back and downgrade the recid PID so the migration can be
        # retried, then propagate the original error.
        db.session.rollback()
        pid = PersistentIdentifier.get_by_object('recid', 'rec', record_uuid)
        pid.status = PIDStatus.RESERVED
        db.session.commit()
        raise
def _create_inclusion_requests(comms, record):
    """Create inclusion requests for communities.

    :param comms: Community IDs for which the inclusion requests might
                  should be created (if they don't exist already).
    :type comms: list of str
    :param record: Record corresponding to this deposit.
    :type record: `invenio_records.api.Record`
    """
    for community_id in comms:
        # Only create a request when one does not exist for this pair yet.
        if not InclusionRequest.get(community_id, record.id):
            InclusionRequest.create(Community.get(community_id), record)
def _create_inclusion_requests(comms, record):
    """Create inclusion requests for communities.

    :param comms: Community IDs for which the inclusion requests might
                  should be created (if they don't exist already).
    :type comms: list of str
    :param record: Record corresponding to this deposit.
    :type record: `invenio_records.api.Record`
    """
    for community_id in comms:
        # Only create a request when one does not exist for this pair yet.
        if not InclusionRequest.get(community_id, record.id):
            InclusionRequest.create(Community.get(community_id), record)
def migrate_record(record_uuid, logger=None):
    """Migrate a record.

    Transforms the legacy record to the new schema, files inclusion
    requests for its provisional communities, and registers/reserves its
    DOI.  On any unexpected error the transaction is rolled back and the
    record's ``recid`` PID is set back to RESERVED before re-raising.

    :param record_uuid: UUID of the record to migrate.
    :param logger: Optional logger used for progress/warning messages.
    """
    try:
        # Migrate record
        record = Record.get_record(record_uuid)
        if '$schema' in record:
            # Presence of '$schema' marks an already-migrated record.
            if logger:
                logger.info("Record already migrated.")
            return
        record = transform_record(record)
        provisional_communities = record.pop('provisional_communities', None)
        record.commit()
        # Create provisional communities.
        if provisional_communities:
            for c_id in provisional_communities:
                try:
                    c = Community.get(c_id)
                    if c:
                        InclusionRequest.create(c, record, notify=False)
                    else:
                        if logger:
                            logger.warning("Community {0} does not exists "
                                           "(record {1}).".format(
                                               c_id, str(record.id)))
                except InclusionRequestExistsError:
                    # A pre-existing request is fine; just note it.
                    if logger:
                        logger.warning("Inclusion request exists.")
        # Register DOI
        doi = record.get('doi')
        if doi:
            # '10.5281' is the locally-minted DOI prefix; external DOIs
            # get no provider and are only reserved.
            is_internal = doi.startswith('10.5281')
            PersistentIdentifier.create(
                pid_type='doi',
                pid_value=doi,
                pid_provider='datacite' if is_internal else None,
                object_type='rec',
                object_uuid=record_uuid,
                status=(PIDStatus.REGISTERED
                        if is_internal else PIDStatus.RESERVED),
            )
        db.session.commit()
    except NoResultFound:
        # Record was deleted; nothing to migrate.
        if logger:
            logger.info("Deleted record - no migration required.")
    except Exception:
        # Roll back and downgrade the recid PID so the migration can be
        # retried, then propagate the original error.
        db.session.rollback()
        pid = PersistentIdentifier.get_by_object('recid', 'rec', record_uuid)
        pid.status = PIDStatus.RESERVED
        db.session.commit()
        raise
def test_fixed_communities(app, db, users, communities, deposit, deposit_file,
                           communities_autoadd_enabled):
    """Test automatic adding and requesting to fixed communities."""
    deposit['grants'] = [{'title': 'SomeGrant'}]
    # 'c3' is owned by one of the deposit owners.
    assert Community.get('c3').id_user in deposit['_deposit']['owners']
    deposit['communities'] = ['c3']
    deposit = _publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert record['communities'] == ['c3', 'ecfunded']
    assert deposit['communities'] == ['c3', 'ecfunded', 'zenodo']
    inclusion_req = InclusionRequest.query.one()
    assert inclusion_req.id_community == 'zenodo'
    assert inclusion_req.id_record == record.id
def validate_publish(self):
    """Validate deposit.

    Checks that files are present, that no multipart upload is still in
    progress, and that every requested community resolves to an existing
    one.
    """
    super(ZenodoDeposit, self).validate()
    if len(self.files) == 0:
        raise MissingFilesError()
    if self.multipart_files.count() != 0:
        raise OngoingMultipartUploadError()
    if 'communities' in self:
        unresolved = [
            c for c in self['communities'] if Community.get(c) is None
        ]
        if unresolved:
            raise MissingCommunityError(unresolved)
def _remove_accepted_communities(comms, record):
    """Remove accepted communities.

    :param comms: Already accepted community IDs which no longer should
                  have this record.
    :type comms: list of str
    :param record: Record corresponding to this deposit.
    :type record: `invenio_records.api.Record`
    :returns: modified 'record' argument
    :rtype: `invenio_records.api.Record`
    """
    for comm_id in comms:
        comm = Community.get(comm_id)
        # Robustness fix: skip IDs that no longer resolve to a community
        # (the original raised AttributeError on a deleted community).
        if comm is not None and comm.has_record(record):
            comm.remove_record(record)  # Handles oai-sets internally
    return record
def _create_inclusion_requests(comms, record):
    """Create inclusion requests for communities.

    :param comms: Community IDs for which the inclusion requests might
                  should be created (if they don't exist already).
    :type comms: list of str
    :param record: Record corresponding to this deposit.
    :type record: `invenio_records.api.Record`
    """
    for community_id in comms:
        api = ZenodoCommunity(community_id)
        # Skip when an InclusionRequest already exists for any version or
        # the community already contains the record.
        no_pending = api.get_comm_irs(record).count() == 0
        if no_pending and not api.has_record(record):
            InclusionRequest.create(Community.get(community_id), record)
def _create_inclusion_requests(comms, record):
    """Create inclusion requests for communities.

    :param comms: Community IDs for which the inclusion requests might
                  should be created (if they don't exist already).
    :type comms: list of str
    :param record: Record corresponding to this deposit.
    :type record: `invenio_records.api.Record`
    """
    for community_id in comms:
        api = ZenodoCommunity(community_id)
        # Skip when an InclusionRequest already exists for any version or
        # the community already contains the record.
        no_pending = api.get_comm_irs(record).count() == 0
        if no_pending and not api.has_record(record):
            InclusionRequest.create(Community.get(community_id), record)
def _remove_accepted_communities(comms, record):
    """Remove accepted communities.

    :param comms: Already accepted community IDs which no longer should
                  have this record.
    :type comms: list of str
    :param record: Record corresponding to this deposit.
    :type record: `invenio_records.api.Record`
    :returns: modified 'record' argument
    :rtype: `invenio_records.api.Record`
    """
    for comm_id in comms:
        comm = Community.get(comm_id)
        # Robustness fix: skip IDs that no longer resolve to a community
        # (the original raised AttributeError on a deleted community).
        if comm is not None and comm.has_record(record):
            comm.remove_record(record)  # Handles oai-sets internally
    return record
def test_remove_community_by_key_del(app, db, communities, deposit,
                                     deposit_file):
    """Test removal of communities by key deletion.

    Communities can be removed by not providing or deleting the communities
    from the key deposit. Moreover, the redundant 'empty' keys should not be
    automatically added to deposit nor record.
    """
    # If 'communities' key was not in deposit metadata,
    # it shouldn't be automatically added.
    assert 'communities' not in deposit
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert 'communities' not in deposit
    assert 'communities' not in record
    assert not record['_oai'].get('sets', [])

    # Request for 'c1' and 'c2'.
    deposit = deposit.edit()
    deposit['communities'] = ['c1', 'c2']
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    # No reason to have 'communities' in record since nothing was accepted.
    assert 'communities' not in record
    assert not record['_oai'].get('sets', [])

    # Accept 'c1'.
    c1 = Community.get('c1')
    c1.accept_record(record)
    record.commit()
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', 'c2']
    assert InclusionRequest.query.count() == 1
    assert record['communities'] == ['c1']
    assert set(record['_oai']['sets']) == set(['user-c1'])

    # Remove the key from deposit and publish.
    deposit = deposit.edit()
    del deposit['communities']
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert 'communities' not in deposit
    assert 'communities' not in record
    assert InclusionRequest.query.count() == 0
    assert not record['_oai'].get('sets', [])
def test_fixed_communities(app, db, users, communities, deposit, deposit_file,
                           communities_autoadd_enabled):
    """Test automatic adding and requesting to fixed communities."""
    deposit['grants'] = [{'title': 'SomeGrant'}, ]
    # 'c3' is owned by one of the deposit owner
    assert Community.get('c3').id_user in deposit['_deposit']['owners']
    deposit['communities'] = ['c3', ]
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert record['communities'] == ['c3', 'grants_comm']
    assert deposit['communities'] == ['c3', 'ecfunded', 'grants_comm',
                                      'zenodo']
    # BUG FIX: this expression was missing its 'assert' keyword, so the
    # expected inclusion-request count was never actually checked.
    assert InclusionRequest.query.count() == 2
    ir1 = InclusionRequest.query.filter_by(id_community='zenodo').one()
    assert ir1.id_record == record.id
    ir2 = InclusionRequest.query.filter_by(id_community='ecfunded').one()
    assert ir2.id_record == record.id
def _get_simple_search_community_query(community_id, qs=None):
    """Query parser for simple search.

    :param qs: Query string.
    :return: Query parser.
    """
    # Permission filter based on publish date and status.
    community = Community.get(community_id)
    must = get_permission_filter(community.root_node_id)
    query = _get_search_qs_query(qs)
    if query:
        must.append(query)
    must.extend(_get_detail_keywords_query())
    return Q('bool', must=must) if must else Q()
def get(self, community_id):
    """Get the details of the specified community.

    .. http:get:: /communities/(string:id)

        Returns a JSON dictionary with the details of the specified
        community.

        **Request**:

        .. sourcecode:: http

            GET /communities/communities/comm1 HTTP/1.1
            Accept: application/json
            Content-Type: application/json
            Host: localhost:5000

        :reqheader Content-Type: application/json
        :query string id: ID of an specific community to get more
            information.

        **Response**:

        .. sourcecode:: http

            HTTP/1.0 200 OK
            Content-Length: 334
            Content-Type: application/json

            {
                "id_user": 1,
                "description": "",
                "title": "",
                "created": "2016-04-05T14:56:37.051462",
                "id": "comm1",
                "page": "",
                "curation_policy": ""
            }

        :resheader Content-Type: application/json
        :statuscode 200: no error
        :statuscode 404: page not found
    """
    community = Community.get(community_id)
    if community is None:
        abort(404)
    # Use the community version as the ETag for conditional requests.
    etag = community.version_id
    self.check_etag(etag)
    response = self.make_response(
        community, links_item_factory=default_links_item_factory)
    response.set_etag(etag)
    return response
def get(self, community_id):
    """Get the details of the specified community.

    .. http:get:: /communities/(string:id)

        Returns a JSON dictionary with the details of the specified
        community.

        **Request**:

        .. sourcecode:: http

            GET /communities/communities/comm1 HTTP/1.1
            Accept: application/json
            Content-Type: application/json
            Host: localhost:5000

        :reqheader Content-Type: application/json
        :query string id: ID of an specific community to get more
            information.

        **Response**:

        .. sourcecode:: http

            HTTP/1.0 200 OK
            Content-Length: 334
            Content-Type: application/json

            {
                "id_user": 1,
                "description": "",
                "title": "",
                "created": "2016-04-05T14:56:37.051462",
                "id": "comm1",
                "page": "",
                "curation_policy": ""
            }

        :resheader Content-Type: application/json
        :statuscode 200: no error
        :statuscode 404: page not found
    """
    community = Community.get(community_id)
    if community is None:
        abort(404)
    # Use the community version as the ETag for conditional requests.
    etag = community.version_id
    self.check_etag(etag)
    response = self.make_response(
        community, links_item_factory=default_links_item_factory)
    response.set_etag(etag)
    return response
def test_fixed_autoadd_redundant(app, db, users, communities, deposit,
                                 deposit_file, communities_autoadd_enabled):
    """Test automatic adding and requesting to fixed communities."""
    deposit['grants'] = [{'title': 'SomeGrant'}, ]
    # 'c3' is owned by one of the deposit owner
    assert Community.get('c3').id_user in deposit['_deposit']['owners']
    # Requesting for 'grants_comm', which would be added automatically
    # shouldn't cause problems
    deposit['communities'] = ['c3', 'grants_comm', 'zenodo']
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert record['communities'] == ['c3', 'grants_comm']
    assert deposit['communities'] == ['c3', 'ecfunded', 'grants_comm',
                                      'zenodo']
    # BUG FIX: this expression was missing its 'assert' keyword, so the
    # expected inclusion-request count was never actually checked.
    assert InclusionRequest.query.count() == 2
    ir1 = InclusionRequest.query.filter_by(id_community='zenodo').one()
    assert ir1.id_record == record.id
    ir2 = InclusionRequest.query.filter_by(id_community='ecfunded').one()
    assert ir2.id_record == record.id
def load_communities(self, data):
    """Load communities type."""
    if not isinstance(data, list):
        raise ValidationError(_('Not a list.'))
    malformed = [
        c for c in data if not (isinstance(c, dict) and 'identifier' in c)]
    if malformed:
        raise ValidationError(
            'Invalid community format: {}.'.format(malformed),
            field_names='communities')
    identifiers = sorted(x['identifier'] for x in data if x.get('identifier'))
    unknown = {c for c in identifiers if not Community.get(c)}
    if unknown:
        raise ValidationError(
            'Invalid communities: {0}'.format(', '.join(unknown)),
            field_names='communities')
    return identifiers or missing
def test_accept_while_edit(app, db, communities, deposit, deposit_file):
    """Test deposit publishing with concurrent events.

    Accept a record, while deposit in open edit and then published.
    """
    deposit['communities'] = ['c1', 'c2']
    deposit = publish_and_expunge(db, deposit)
    assert InclusionRequest.query.count() == 2
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', 'c2']
    assert not record.get('communities', [])
    assert not record['_oai'].get('sets', [])

    # Open for edit.
    deposit = deposit.edit()
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', 'c2']
    assert not record.get('communities', [])
    assert not record['_oai'].get('sets', [])
    assert InclusionRequest.query.count() == 2

    # Accept the record into 'c1' meanwhile.
    c1 = Community.get('c1')
    c1.accept_record(record)
    record.commit()
    db.session.commit()

    # Publish and make sure nothing is missing.
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', 'c2']
    assert record['communities'] == ['c1']
    assert record['_oai']['sets'] == ['user-c1']
    assert InclusionRequest.query.count() == 1
    inclusion_req = InclusionRequest.query.one()
    assert inclusion_req.id_community == 'c2'
    assert inclusion_req.id_record == record.id
def test_fixed_autoadd_redundant(app, db, users, communities, deposit,
                                 deposit_file, communities_autoadd_enabled):
    """Test automatic adding and requesting to fixed communities."""
    deposit['grants'] = [{'title': 'SomeGrant'}]
    # 'c3' is owned by one of the deposit owners.
    assert Community.get('c3').id_user in deposit['_deposit']['owners']
    # Requesting for 'ecfunded', which would be added automatically
    # shouldn't cause problems.
    deposit['communities'] = ['c3', 'ecfunded', 'zenodo']
    deposit = _publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert record['communities'] == ['c3', 'ecfunded']
    assert deposit['communities'] == ['c3', 'ecfunded', 'zenodo']
    inclusion_req = InclusionRequest.query.one()
    assert inclusion_req.id_community == 'zenodo'
    assert inclusion_req.id_record == record.id
def test_reject_while_edit(app, db, communities, deposit, deposit_file):
    """Test deposit publishing with concurrent events.

    Reject a record, while deposit in open edit and published.
    """
    # Request for community 'c1'.
    deposit['communities'] = ['c1']
    deposit = publish_and_expunge(db, deposit)
    assert deposit['communities'] == ['c1']
    pid, record = deposit.fetch_published()
    assert not record.get('communities', [])
    assert InclusionRequest.query.count() == 1
    inclusion_req = InclusionRequest.query.one()
    assert inclusion_req.id_community == 'c1'
    assert inclusion_req.id_record == record.id

    # Open deposit in edit mode and request another community 'c2'.
    deposit = deposit.edit()
    deposit['communities'] = ['c1', 'c2']

    # Reject the request for community 'c1'.
    c1 = Community.get('c1')
    c1.reject_record(record)
    db.session.commit()

    # Publish the deposit.
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    # NOTE: 'c1' is requested again!
    assert InclusionRequest.query.count() == 2
    ir_c1 = InclusionRequest.query.filter_by(id_community='c1').one()
    ir_c2 = InclusionRequest.query.filter_by(id_community='c2').one()
    assert ir_c1.id_record == record.id
    assert ir_c2.id_record == record.id
    assert deposit['communities'] == ['c1', 'c2']
    assert not record.get('communities', [])
def test_oai_set_result_count(mocker, audit_records, db, es, communities,
                              oai_sources, issues):
    """Check OAI set result counts across the DB, ES and /oai2d sources."""
    db_records, es_records, oai2d_records = oai_sources
    for recid in db_records:
        _, record = record_resolver.resolve(recid)
        record['_oai']['sets'] = ['user-c1']
        record.commit()
    db.session.commit()

    record_indexer = RecordIndexer()
    for recid in es_records:
        _, record = record_resolver.resolve(recid)
        record['_oai']['sets'] = ['user-c1']
        record_indexer.index(record)
    current_search.flush_and_refresh(index='records')

    # '/oai2d' needs straight-forward cheating... There's no way to be sure
    # why the endpoint sometimes fails to report the correct results. It
    # could be a Resumption Token issue, or even an indexing issue on
    # Elasticsearch. Either way, we have to be able to replicate when
    # running on production this behavior and report it as an issue.
    oai2d_ids_mock = MagicMock()
    oai2d_ids_mock.return_value = set(oai2d_records)
    oai2d_ids_mock = mocker.patch(
        'zenodo.modules.auditor.oai.OAISetResultCheck'
        '._oai2d_endpoint_identifiers',
        new=oai2d_ids_mock)

    audit = OAIAudit('testAudit', logging.getLogger('auditorTesting'), [])
    check = OAISetResultCheck(audit, Community.get('c1'))
    check.perform()
    audit.clear_db_oai_set_cache()

    result_issues = check.issues.get('missing_ids', {})
    db_issues, es_issues, api_issues = issues
    assert set(result_issues.get('db', [])) == set(db_issues)
    assert set(result_issues.get('es', [])) == set(es_issues)
    assert set(result_issues.get('oai2d', [])) == set(api_issues)
def _default_parser_community(community_id, qstr=None):
    """Default parser that uses the Q() from elasticsearch_dsl.

    Full text Search.
    Detail Search.

    :param community_id: Identifier of the community whose root node scopes
        the permission filter.
    :param qstr: Query string.
    :returns: Query parser.
    """
    # add Permission filter by publish date and status
    comm = Community.get(community_id)
    root_node_id = comm.root_node_id
    mt = get_permission_filter(root_node_id)

    # multi keywords search filter
    kmt = _get_detail_keywords_query()

    # detail search
    if kmt:
        mt.extend(kmt)
        # FIX: this previously called _get_search_qs_query(qs) with the
        # undefined name `qs`, raising NameError; the parameter is `qstr`.
        q = _get_search_qs_query(qstr)
        if q:
            mt.append(q)
    else:
        # Full Text Search
        if qstr:
            q_s = Q('multi_match', query=qstr, operator='and',
                    fields=[
                        'content.file.content^1.5',
                        'content.file.content.ja^1.2',
                        '_all',
                        'search_string'
                    ],
                    type='most_fields', minimum_should_match='75%')
            mt.append(q_s)
    return Q('bool', must=mt) if mt else Q()
def test_oai_set_result_count(mocker, audit_records, db, es, communities,
                              oai_sources, issues):
    """Check that the OAI set audit reports the expected missing ids.

    Records listed in ``oai_sources`` are placed in the 'user-c1' set in the
    DB and in Elasticsearch, the '/oai2d' endpoint is mocked, and the
    resulting ``missing_ids`` issues are compared per source with ``issues``.
    """
    db_records, es_records, oai2d_records = oai_sources
    # Mark DB records as part of the OAI set.
    for recid in db_records:
        _, record = record_resolver.resolve(recid)
        record['_oai']['sets'] = ['user-c1']
        record.commit()
    db.session.commit()

    # Mark and index the ES records.
    indexer = RecordIndexer()
    for recid in es_records:
        _, record = record_resolver.resolve(recid)
        record['_oai']['sets'] = ['user-c1']
        indexer.index(record)
    current_search.flush_and_refresh(index='records')

    # '/oai2d' needs straight-forward cheating... There's no way to be sure
    # why the endpoint sometimes fails to report the correct results. It could
    # be a Resumption Token issue, or even an indexing issue on Elasticsearch.
    # Either way, we have to be able to replicate when running on production
    # this behavior and report it as an issue.
    oai2d_ids_mock = MagicMock()
    oai2d_ids_mock.return_value = set(oai2d_records)
    oai2d_ids_mock = mocker.patch(
        'zenodo.modules.auditor.oai.OAISetResultCheck'
        '._oai2d_endpoint_identifiers',
        new=oai2d_ids_mock)

    audit = OAIAudit('testAudit', logging.getLogger('auditorTesting'), [])
    check = OAISetResultCheck(audit, Community.get('c1'))
    check.perform()
    audit.clear_db_oai_set_cache()

    # Missing ids are reported per source: 'db', 'es' and 'oai2d'.
    result_issues = check.issues.get('missing_ids', {})
    db_issues, es_issues, api_issues = issues
    assert set(result_issues.get('db', [])) == set(db_issues)
    assert set(result_issues.get('es', [])) == set(es_issues)
    assert set(result_issues.get('oai2d', [])) == set(api_issues)
def get_self_list(cls, node_path, community_id=None):
    """Get index list info.

    :param node_path: Identifier (path) of the index; the last path
        component is the index id.
    :param community_id: Optional community identifier. When given, the
        result is restricted to items rooted at the community's root node.
    :return: The list of index rows.
    """
    # The index id is the last component of the slash-separated path.
    index = node_path.rfind('/')
    pid = node_path[index + 1:]

    # Build the recursive index query once (previously duplicated in both
    # branches with identical semantics).
    recursive_t = cls.recs_query()
    query = db.session.query(recursive_t).filter(
        db.or_(recursive_t.c.pid == pid,
               recursive_t.c.cid == pid))
    # Non-privileged users only see public indexes.
    if not get_user_roles()[0]:
        query = query.filter(recursive_t.c.public_state)
    q = query.order_by(recursive_t.c.path).all()

    if not community_id:
        return q

    from invenio_communities.models import Community
    community_obj = Community.get(community_id)
    lst = list()
    # For the root path '0' an empty list is returned (as before).
    if node_path != '0':
        for item in q:
            # Keep the community's own root node and all non-root items.
            if item.cid == community_obj.root_node_id \
                    and item.pid == '0':
                lst.append(item)
            if item.pid != '0':
                lst.append(item)
    return lst
def test_reject_while_edit(app, db, communities, deposit, deposit_file): """Test deposit publishing with concurrent events. Reject a record, while deposit in open edit and published. """ # Request for community 'c1' deposit['communities'] = ['c1', ] deposit = _publish_and_expunge(db, deposit) assert deposit['communities'] == ['c1', ] pid, record = deposit.fetch_published() assert not record.get('communities', []) assert InclusionRequest.query.count() == 1 ir = InclusionRequest.query.one() assert ir.id_community == 'c1' assert ir.id_record == record.id # Open deposit in edit mode and request another community 'c2' deposit = deposit.edit() deposit['communities'] = ['c1', 'c2'] # Reject the request for community 'c1' c1 = Community.get('c1') c1.reject_record(record) db.session.commit() # Publish the deposit deposit = _publish_and_expunge(db, deposit) pid, record = deposit.fetch_published() # NOTE: 'c1' is requested again! assert InclusionRequest.query.count() == 2 ir1 = InclusionRequest.query.filter_by(id_community='c1').one() ir2 = InclusionRequest.query.filter_by(id_community='c2').one() assert ir1.id_record == record.id assert ir2.id_record == record.id assert deposit['communities'] == ['c1', 'c2'] assert not record.get('communities', [])
def test_edit_flow(api_client, db, es, location, json_auth_headers,
                   deposit_url, get_json, auth_headers, json_headers,
                   license_record, communities, resolver):
    """Test simple flow using REST API.

    Covers deposit creation, file upload, publishing, community
    auto-accept/approval, editing the published record, and discarding an
    open edit.
    """
    headers = json_auth_headers
    client = api_client
    test_data = dict(
        metadata=dict(
            upload_type='presentation',
            title='Test title',
            creators=[
                dict(name='Doe, John', affiliation='Atlantis'),
                dict(name='Smith, Jane', affiliation='Atlantis')
            ],
            description='Test Description',
            publication_date='2013-05-08',
            access_right='open',
            license='CC0-1.0',
            communities=[{'identifier': 'c1'}, {'identifier': 'c3'}],
        )
    )

    # Create deposit
    response = client.post(
        deposit_url, data=json.dumps(test_data), headers=headers)
    data = get_json(response, code=201)

    # Get identifier and links
    current_search.flush_and_refresh(index='deposits')
    links = data['links']

    # Upload 3 files
    for i in range(3):
        f = 'test{0}.txt'.format(i)
        response = client.post(
            links['files'],
            data=dict(file=(BytesIO(b'ctx'), f), name=f),
            headers=auth_headers,
        )
        assert response.status_code == 201, i

    # Update metadata
    newdata = dict(metadata=data['metadata'])
    newdata['metadata']['title'] = 'Updated title'
    resdata = get_json(client.put(
        links['self'], data=json.dumps(newdata), headers=headers
    ), code=200)

    # Publish deposition
    response = client.post(links['publish'], headers=auth_headers)
    data = get_json(response, code=202)
    record_id = data['record_id']

    # Does record exists?
    current_search.flush_and_refresh(index='records')
    preedit_data = get_json(client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id),
        headers=json_headers,
    ), code=200)
    expected_doi = '10.5072/zenodo.{0}'.format(record_id)
    assert preedit_data['doi'] == expected_doi
    # - community c3 got auto-accepted (owned by deposit user)
    assert preedit_data['metadata']['communities'] == [{'identifier': 'c3'}]

    # Are files downloadable by everyone (open)?
    assert len(preedit_data['files']) == 3
    download_url = preedit_data['files'][0]['links']['download']
    assert client.get(download_url).status_code == 200

    # Edit record - can now be done immediately after.
    response = client.post(links['edit'], headers=auth_headers)
    assert response.status_code == 201

    # Edit - 2nd time is invalid.
    response = client.post(links['edit'], headers=auth_headers)
    assert response.status_code == 403  # FIXME 400

    # Get data
    data = get_json(client.get(links['self'], headers=auth_headers), code=200)

    # Not allowed to delete
    assert client.delete(
        links['self'], headers=auth_headers).status_code == 403

    # Update metadata
    data = dict(metadata=data['metadata'])
    data['metadata'].update(dict(
        title='New title',
        access_right='closed',
        creators=[
            dict(name="Smith, Jane", affiliation="Atlantis"),
            dict(name="Doe, John", affiliation="Atlantis"),
        ],
        communities=[
            {'identifier': 'c1'}
        ]
    ))
    resdata = get_json(client.put(
        links['self'], data=json.dumps(data), headers=headers
    ), code=200)
    assert resdata['title'] == 'New title'
    assert resdata['metadata']['title'] == 'New title'

    # Try to change DOI
    data['metadata']['doi'] = '10.1234/foo'
    data = get_json(client.put(
        links['self'], data=json.dumps(data), headers=headers
    ), code=400)

    # Approve community
    c = Community.get('c1')
    _, record = resolver.resolve(str(record_id))
    c.accept_record(record)
    record.commit()
    db.session.commit()

    # Get record to confirm if both communities should be visible now
    assert get_json(client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id),
        headers=json_headers,
    ), code=200)['metadata']['communities'] == [
        {'identifier': 'c1'},
        {'identifier': 'c3'},
    ]

    # Publish
    response = client.post(links['publish'], headers=auth_headers)
    data = get_json(response, code=202)
    current_search.flush_and_refresh(index='records')

    # - is record still accessible?
    postedit_data = get_json(client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id),
        headers=json_headers,
    ), code=200)
    # - sanity checks
    assert postedit_data['doi'] == expected_doi
    assert postedit_data['record_id'] == record_id

    # - files should no longer be downloadable (closed access)
    # - download_url worked before edit, so make sure it doesn't work now.
    assert len(postedit_data['files']) == 0
    assert client.get(download_url).status_code == 401
    # - c3 was removed, so only c1 one should be visible now
    assert postedit_data['metadata']['communities'] == [
        {'identifier': 'c1'},
    ]

    # Edit
    data = get_json(client.post(links['edit'], headers=auth_headers),
                    code=201)
    # Update
    data = dict(metadata=data['metadata'])
    data['metadata'].update(dict(title='Will be discarded'))
    resdata = get_json(client.put(
        links['self'], data=json.dumps(data), headers=headers
    ), code=200)
    # Discard
    data = get_json(
        client.post(links['discard'], headers=auth_headers), code=201)
    # Get and assert metadata
    data = get_json(client.get(links['self'], headers=auth_headers), code=200)
    assert data['title'] == postedit_data['title']
def suggest():
    """Suggest (or directly add) a record to a community.

    Reads ``community`` and ``recpid`` from the request values. If the
    current user can curate the community, the record is added directly;
    otherwise an :class:`InclusionRequest` is created. Always redirects
    back to ``url`` (request value) or the referrer.
    """
    community = None
    record = None
    url = request.referrer
    if "url" in request.values and request.values["url"]:
        url = request.values["url"]
    # FIX: `not "x" in y` replaced with the idiomatic `"x" not in y`.
    if "community" not in request.values:
        flash(u"Error, no {} given".format(
            current_app.config["COMMUNITIES_NAME"]), "danger")
        return redirect(url)
    community_id = request.values["community"]
    community = Community.get(community_id)
    if not community:
        flash(u"Error, unknown {} {}".format(
            current_app.config["COMMUNITIES_NAME"], community_id), "danger")
        return redirect(url)
    if not _get_permission("communities-read", community).can() \
            and not DynamicPermission(ActionNeed('admin-access')).can():
        flash(u"Error, you don't have permissions on the {} {}".format(
            current_app.config["COMMUNITIES_NAME"], community_id), "danger")
        return redirect(url)
    if "recpid" not in request.values:
        flash(u"Error, no record given", "danger")
        return redirect(url)
    recid = request.values["recpid"]
    resolver = Resolver(
        pid_type='recid', object_type='rec', getter=Record.get_record)
    try:
        pid, record = resolver.resolve(recid)
    except Exception:
        # FIX: typo "unkown" -> "unknown" in the user-facing message.
        flash(u"Error, unknown record {}".format(recid), "danger")
        return redirect(url)
    # if the user has the curate permission on this community,
    # we automatically add the record
    if _get_permission("communities-curate", community).can():
        try:
            community.add_record(record)
        # FIX: bare `except:` narrowed to `except Exception:` so that
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        except Exception:
            # the record is already in the community
            flash(u"The record already exists in the {} {}.".format(
                current_app.config["COMMUNITIES_NAME"], community.title),
                "warning")
        else:
            record.commit()
            flash(u"The record has been added to the {} {}.".format(
                current_app.config["COMMUNITIES_NAME"], community.title))
    # otherwise we only suggest it and it will appear in the curate list
    else:
        try:
            InclusionRequest.create(community=community, record=record,
                                    user=current_user)
        except InclusionRequestObsoleteError:
            # the record is already in the community
            flash(u"The record already exists in the {} {}.".format(
                current_app.config["COMMUNITIES_NAME"], community.title),
                "warning")
        except InclusionRequestExistsError:
            flash(u"The record has already been suggested "
                  u"to the {} {}.".format(
                      current_app.config["COMMUNITIES_NAME"],
                      community.title), "warning")
        else:
            flash(u"The record has been suggested "
                  u"to the {} {}.".format(
                      current_app.config["COMMUNITIES_NAME"],
                      community.title))
    db.session.commit()
    RecordIndexer().index_by_id(record.id)
    return redirect(url)
def inner(community_id, *args, **kwargs):
    """Resolve ``community_id`` and invoke the wrapped view with it."""
    community = Community.get(community_id)
    if community is None:
        # Unknown community identifier -> 404.
        abort(404)
    return f(community, *args, **kwargs)
def inner(community_id, *args, **kwargs):
    """Look up the community and pass it through to the decorated view."""
    resolved = Community.get(community_id)
    if resolved is None:
        # No such community: respond with Not Found.
        abort(404)
    return f(resolved, *args, **kwargs)
def test_edit_flow(datacite_mock, api_client, db, es, location,
                   json_auth_headers, deposit_url, get_json, auth_headers,
                   json_headers, license_record, communities, resolver):
    """Test simple flow using REST API.

    Covers deposit creation, file upload, publishing, DataCite DOI
    registration (mocked), community approval, editing the published
    record, and discarding an open edit.
    """
    headers = json_auth_headers
    client = api_client
    test_data = dict(metadata=dict(
        upload_type='presentation',
        title='Test title',
        creators=[
            dict(name='Doe, John', affiliation='Atlantis'),
            dict(name='Smith, Jane', affiliation='Atlantis')
        ],
        description='Test Description',
        publication_date='2013-05-08',
        access_right='open',
        license='CC0-1.0',
        communities=[{
            'identifier': 'c1'
        }, {
            'identifier': 'c3'
        }],
    ))

    # Create deposit
    response = client.post(deposit_url,
                           data=json.dumps(test_data),
                           headers=headers)
    data = get_json(response, code=201)

    # Get identifier and links
    current_search.flush_and_refresh(index='deposits')
    links = data['links']

    # Upload 3 files
    for i in range(3):
        f = 'test{0}.txt'.format(i)
        response = client.post(
            links['files'],
            data=dict(file=(BytesIO(b'ctx'), f), name=f),
            headers=auth_headers,
        )
        assert response.status_code == 201, i

    # Update metadata
    newdata = dict(metadata=data['metadata'])
    newdata['metadata']['title'] = 'Updated title'
    resdata = get_json(client.put(links['self'],
                                  data=json.dumps(newdata),
                                  headers=headers),
                       code=200)

    # Publish deposition
    response = client.post(links['publish'], headers=auth_headers)
    data = get_json(response, code=202)
    record_id = data['record_id']
    assert PersistentIdentifier.query.filter_by(pid_type='depid').count() == 1
    # There should be two 'recid' PIDs - Concept PID and version PID
    assert PersistentIdentifier.query.filter_by(pid_type='recid').count() == 2
    recid_pid = PersistentIdentifier.get('recid', str(record_id))
    doi_pid = PersistentIdentifier.get(pid_type='doi',
                                       pid_value='10.5072/zenodo.1')
    assert doi_pid.status == PIDStatus.RESERVED
    # This task (datacite_register) would normally be executed asynchronously
    datacite_register(recid_pid.pid_value, recid_pid.object_uuid)
    assert doi_pid.status == PIDStatus.REGISTERED
    # Make sure it was registered properly in datacite
    # It should be called twice - for concept DOI and version DOI
    assert datacite_mock().metadata_post.call_count == 2
    # Concept DOI call
    datacite_mock().doi_post.assert_any_call('10.5072/zenodo.1',
                                             'https://zenodo.org/record/1')
    # Record DOI call
    datacite_mock().doi_post.assert_any_call('10.5072/zenodo.2',
                                             'https://zenodo.org/record/2')

    # Does record exists?
    current_search.flush_and_refresh(index='records')
    preedit_data = get_json(client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id),
        headers=json_headers,
    ), code=200)
    expected_doi = '10.5072/zenodo.{0}'.format(record_id)
    assert preedit_data['doi'] == expected_doi
    # - community c3 got auto-accepted (owned by deposit user)
    assert preedit_data['metadata']['communities'] == [{'identifier': 'c3'}]

    # Are files downloadable by everyone (open)?
    assert len(preedit_data['files']) == 3
    download_url = preedit_data['files'][0]['links']['download']
    assert client.get(download_url).status_code == 200

    # Edit record - can now be done immediately after.
    response = client.post(links['edit'], headers=auth_headers)
    assert response.status_code == 201

    # Edit - 2nd time is invalid.
    response = client.post(links['edit'], headers=auth_headers)
    assert response.status_code == 403  # FIXME 400

    # Get data
    data = get_json(client.get(links['self'], headers=auth_headers), code=200)

    # Not allowed to delete
    assert client.delete(links['self'],
                         headers=auth_headers).status_code == 403

    # Update metadata
    data = dict(metadata=data['metadata'])
    data['metadata'].update(
        dict(title='New title',
             access_right='closed',
             creators=[
                 dict(name="Smith, Jane", affiliation="Atlantis"),
                 dict(name="Doe, John", affiliation="Atlantis"),
             ],
             communities=[{
                 'identifier': 'c1'
             }]))
    resdata = get_json(client.put(links['self'],
                                  data=json.dumps(data),
                                  headers=headers),
                       code=200)
    assert resdata['title'] == 'New title'
    assert resdata['metadata']['title'] == 'New title'

    # Try to change DOI
    data['metadata']['doi'] = '10.1234/foo'
    data = get_json(client.put(links['self'],
                               data=json.dumps(data),
                               headers=headers),
                    code=400)

    # Approve community
    c = Community.get('c1')
    _, record = resolver.resolve(str(record_id))
    c.accept_record(record)
    record.commit()
    db.session.commit()

    # Get record to confirm if both communities should be visible now
    assert get_json(client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id),
        headers=json_headers,
    ), code=200)['metadata']['communities'] == [
        {
            'identifier': 'c1'
        },
        {
            'identifier': 'c3'
        },
    ]

    # Publish
    response = client.post(links['publish'], headers=auth_headers)
    data = get_json(response, code=202)
    current_search.flush_and_refresh(index='records')

    # - is record still accessible?
    postedit_data = get_json(client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id),
        headers=json_headers,
    ), code=200)
    # - sanity checks
    assert postedit_data['doi'] == expected_doi
    assert postedit_data['record_id'] == record_id

    # - files should no longer be downloadable (closed access)
    # - download_url worked before edit, so make sure it doesn't work now.
    assert 'files' not in postedit_data
    assert client.get(download_url).status_code == 404
    # - c3 was removed, so only c1 one should be visible now
    assert postedit_data['metadata']['communities'] == [
        {
            'identifier': 'c1'
        },
    ]

    # Edit
    data = get_json(client.post(links['edit'], headers=auth_headers),
                    code=201)
    # Update
    data = dict(metadata=data['metadata'])
    data['metadata'].update(dict(title='Will be discarded'))
    resdata = get_json(client.put(links['self'],
                                  data=json.dumps(data),
                                  headers=headers),
                       code=200)
    # Discard
    data = get_json(client.post(links['discard'], headers=auth_headers),
                    code=201)
    # Get and assert metadata
    data = get_json(client.get(links['self'], headers=auth_headers), code=200)
    assert data['title'] == postedit_data['title']
def test_edit_flow(
    mocker,
    api,
    api_client,
    db,
    es,
    locations,
    json_auth_headers,
    deposit_url,
    get_json,
    auth_headers,
    json_headers,
    license_record,
    communities,
    resolver,
):
    """Test simple flow using REST API.

    Covers deposit creation, file upload, publishing, DataCite DOI
    registration (mocked), SIP archiving, community approval,
    edit/discard, and creating and publishing a new version.
    """
    # Stash the configuration and enable SIP writing to disk
    orig = api.config['SIPSTORE_ARCHIVER_WRITING_ENABLED']
    api.config['SIPSTORE_ARCHIVER_WRITING_ENABLED'] = True
    datacite_mock = mocker.patch(
        'invenio_pidstore.providers.datacite.DataCiteMDSClient')
    archive_task_mock = mocker.patch(
        'zenodo.modules.deposit.receivers.archive_sip')
    headers = json_auth_headers
    client = api_client
    test_data = dict(metadata=dict(
        upload_type='presentation',
        title='Test title',
        creators=[
            dict(name='Doe, John', affiliation='Atlantis'),
            dict(name='Smith, Jane', affiliation='Atlantis')
        ],
        description='Test Description',
        publication_date='2013-05-08',
        access_right='open',
        license='CC0-1.0',
        communities=[{
            'identifier': 'c1'
        }, {
            'identifier': 'c3'
        }],
    ))

    # Create deposit
    response = client.post(deposit_url,
                           data=json.dumps(test_data),
                           headers=headers)
    data = get_json(response, code=201)

    # Get identifier and links
    current_search.flush_and_refresh(index='deposits')
    links = data['links']

    # Upload 3 files
    for i in range(3):
        f = 'test{0}.txt'.format(i)
        response = client.post(
            links['files'],
            data=dict(file=(BytesIO(b'ctx'), f), name=f),
            headers=auth_headers,
        )
        assert response.status_code == 201, i

    # Update metadata
    newdata = dict(metadata=data['metadata'])
    newdata['metadata']['title'] = 'Updated title'
    resdata = get_json(client.put(links['self'],
                                  data=json.dumps(newdata),
                                  headers=headers),
                       code=200)
    # No SIP archiving before the deposit is published.
    assert not archive_task_mock.delay.called

    # Publish deposition
    response = client.post(links['publish'], headers=auth_headers)
    data = get_json(response, code=202)
    record_id = data['record_id']
    assert PersistentIdentifier.query.filter_by(pid_type='depid').count() == 1
    # There should be two 'recid' PIDs - Concept PID and version PID
    assert PersistentIdentifier.query.filter_by(pid_type='recid').count() == 2
    recid_pid = PersistentIdentifier.get('recid', str(record_id))
    doi_pid = PersistentIdentifier.get(pid_type='doi',
                                       pid_value='10.5072/zenodo.1')
    assert doi_pid.status == PIDStatus.RESERVED
    # This task (datacite_register) would normally be executed asynchronously
    datacite_register(recid_pid.pid_value, recid_pid.object_uuid)
    assert doi_pid.status == PIDStatus.REGISTERED
    # Make sure it was registered properly in datacite
    # It should be called twice - for concept DOI and version DOI
    assert datacite_mock().metadata_post.call_count == 2
    # Concept DOI call
    datacite_mock().doi_post.assert_any_call('10.5072/zenodo.1',
                                             'https://zenodo.org/record/1')
    # Record DOI call
    datacite_mock().doi_post.assert_any_call('10.5072/zenodo.2',
                                             'https://zenodo.org/record/2')

    # Does record exists?
    current_search.flush_and_refresh(index='records')
    # Was SIP writing task executed?
    sip = RecordSIP.query.filter_by(pid_id=recid_pid.id).one().sip
    archive_task_mock.delay.assert_called_with(str(sip.id))
    preedit_data = get_json(client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id),
        headers=json_headers,
    ), code=200)
    expected_doi = '10.5072/zenodo.{0}'.format(record_id)
    assert preedit_data['doi'] == expected_doi
    # - community c3 got auto-accepted (owned by deposit user)
    assert preedit_data['metadata']['communities'] == [{'identifier': 'c3'}]

    # Are files downloadable by everyone (open)?
    assert len(preedit_data['files']) == 3
    download_url = preedit_data['files'][0]['links']['download']
    assert client.get(download_url).status_code == 200

    # Edit record - can now be done immediately after.
    response = client.post(links['edit'], headers=auth_headers)
    assert response.status_code == 201

    # Edit - 2nd time is invalid.
    response = client.post(links['edit'], headers=auth_headers)
    assert response.status_code == 403  # FIXME 400

    # Get data
    data = get_json(client.get(links['self'], headers=auth_headers), code=200)

    # Not allowed to delete
    assert client.delete(links['self'],
                         headers=auth_headers).status_code == 403

    # Update metadata
    data = dict(metadata=data['metadata'])
    data['metadata'].update(
        dict(title='New title',
             access_right='closed',
             creators=[
                 dict(name="Smith, Jane", affiliation="Atlantis"),
                 dict(name="Doe, John", affiliation="Atlantis"),
             ],
             communities=[{
                 'identifier': 'c1'
             }]))
    resdata = get_json(client.put(links['self'],
                                  data=json.dumps(data),
                                  headers=headers),
                       code=200)
    assert resdata['title'] == 'New title'
    assert resdata['metadata']['title'] == 'New title'

    # Try to change DOI
    data['metadata']['doi'] = '10.1234/foo'
    data = get_json(client.put(links['self'],
                               data=json.dumps(data),
                               headers=headers),
                    code=400)

    # Approve community
    c = Community.get('c1')
    _, record = resolver.resolve(str(record_id))
    c.accept_record(record)
    record.commit()
    db.session.commit()

    # Get record to confirm if both communities should be visible now
    assert get_json(client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id),
        headers=json_headers,
    ), code=200)['metadata']['communities'] == [
        {
            'identifier': 'c1'
        },
        {
            'identifier': 'c3'
        },
    ]

    # Publish
    response = client.post(links['publish'], headers=auth_headers)
    data = get_json(response, code=202)
    current_search.flush_and_refresh(index='records')

    # - is record still accessible?
    postedit_data = get_json(client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id),
        headers=json_headers,
    ), code=200)
    # - sanity checks
    assert postedit_data['doi'] == expected_doi
    assert postedit_data['record_id'] == record_id

    # - files should no longer be downloadable (closed access)
    # - download_url worked before edit, so make sure it doesn't work now.
    assert 'files' not in postedit_data
    assert client.get(download_url).status_code == 404
    # - c3 was removed, so only c1 one should be visible now
    assert postedit_data['metadata']['communities'] == [
        {
            'identifier': 'c1'
        },
    ]
    # Was the second SIP sent for archiving?
    sip2 = RecordSIP.query.filter_by(pid_id=recid_pid.id).order_by(
        RecordSIP.created.desc()).first().sip
    archive_task_mock.delay.assert_called_with(str(sip2.id))

    # Get newversion url before editing the record
    data = get_json(client.get(links['self'], headers=auth_headers), code=200)
    new_version_url = data['links']['newversion']
    assert new_version_url ==\
        'http://localhost/deposit/depositions/2/actions/newversion'

    # Edit
    data = get_json(client.post(links['edit'], headers=auth_headers),
                    code=201)
    # Update
    data = dict(metadata=data['metadata'])
    data['metadata'].update(dict(title='Will be discarded'))
    resdata = get_json(client.put(links['self'],
                                  data=json.dumps(data),
                                  headers=headers),
                       code=200)
    # Discard
    data = get_json(client.post(links['discard'], headers=auth_headers),
                    code=201)
    # Get and assert metadata
    data = get_json(client.get(links['self'], headers=auth_headers), code=200)
    assert data['title'] == postedit_data['title']

    # New Version
    data = get_json(client.post(new_version_url, headers=auth_headers),
                    code=201)
    links = data['links']
    # Check if UI new version link is correct
    assert links['latest_draft_html'] ==\
        'http://localhost/deposit/3'
    # Get latest version
    data = get_json(client.get(links['latest_draft'], headers=auth_headers),
                    code=200)
    links = data['links']
    # Update new version
    data = dict(metadata=data['metadata'])
    data['metadata'].update(dict(title='This is the new version'))
    resdata = get_json(client.put(links['self'],
                                  data=json.dumps(data),
                                  headers=headers),
                       code=200)
    links = resdata['links']
    # Add a file to the new deposit
    res = get_json(client.put(
        links['bucket'] + '/newfile.txt',
        input_stream=BytesIO(b'newfile'),
        headers=auth_headers,
    ), code=200)
    # Publish the new record
    response = client.post(links['publish'], headers=auth_headers)
    data = get_json(response, code=202)
    links = data['links']
    # Get the new record
    data = get_json(client.get(links['record'], headers=auth_headers),
                    code=200)
    # See that the title is updated accordingly
    assert data['metadata']['title'] == 'This is the new version'

    # Change the config back
    api.config['SIPSTORE_ARCHIVER_WRITING_ENABLED'] = orig
def test_edit_flow(mocker, api_client, db, es, location, json_auth_headers,
                   deposit_url, get_json, auth_headers, json_headers,
                   license_record, communities, resolver):
    """Test simple flow using REST API.

    Covers deposit creation, file upload, publishing, DataCite DOI
    registration (mocked via ``mocker``), community approval, editing the
    published record, and discarding an open edit.
    """
    datacite_mock = mocker.patch(
        'invenio_pidstore.providers.datacite.DataCiteMDSClient')
    headers = json_auth_headers
    client = api_client
    test_data = dict(
        metadata=dict(
            upload_type='presentation',
            title='Test title',
            creators=[
                dict(name='Doe, John', affiliation='Atlantis'),
                dict(name='Smith, Jane', affiliation='Atlantis')
            ],
            description='Test Description',
            publication_date='2013-05-08',
            access_right='open',
            license='CC0-1.0',
            communities=[{'identifier': 'c1'}, {'identifier': 'c3'}],
        )
    )

    # Create deposit
    response = client.post(
        deposit_url, data=json.dumps(test_data), headers=headers)
    data = get_json(response, code=201)

    # Get identifier and links
    current_search.flush_and_refresh(index='deposits')
    links = data['links']

    # Upload 3 files
    for i in range(3):
        f = 'test{0}.txt'.format(i)
        response = client.post(
            links['files'],
            data=dict(file=(BytesIO(b'ctx'), f), name=f),
            headers=auth_headers,
        )
        assert response.status_code == 201, i

    # Update metadata
    newdata = dict(metadata=data['metadata'])
    newdata['metadata']['title'] = 'Updated title'
    resdata = get_json(client.put(
        links['self'], data=json.dumps(newdata), headers=headers
    ), code=200)

    # Publish deposition
    response = client.post(links['publish'], headers=auth_headers)
    data = get_json(response, code=202)
    record_id = data['record_id']
    assert PersistentIdentifier.query.filter_by(pid_type='depid').count() == 1
    # There should be two 'recid' PIDs - Concept PID and version PID
    assert PersistentIdentifier.query.filter_by(pid_type='recid').count() == 2
    recid_pid = PersistentIdentifier.get('recid', str(record_id))
    doi_pid = PersistentIdentifier.get(
        pid_type='doi', pid_value='10.5072/zenodo.1')
    assert doi_pid.status == PIDStatus.RESERVED
    # This task (datacite_register) would normally be executed asynchronously
    datacite_register(recid_pid.pid_value, recid_pid.object_uuid)
    assert doi_pid.status == PIDStatus.REGISTERED
    # Make sure it was registered properly in datacite
    # It should be called twice - for concept DOI and version DOI
    assert datacite_mock().metadata_post.call_count == 2
    # Concept DOI call
    datacite_mock().doi_post.assert_any_call(
        '10.5072/zenodo.1', 'https://zenodo.org/record/1')
    # Record DOI call
    datacite_mock().doi_post.assert_any_call(
        '10.5072/zenodo.2', 'https://zenodo.org/record/2')

    # Does record exists?
    current_search.flush_and_refresh(index='records')
    preedit_data = get_json(client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id),
        headers=json_headers,
    ), code=200)
    expected_doi = '10.5072/zenodo.{0}'.format(record_id)
    assert preedit_data['doi'] == expected_doi
    # - community c3 got auto-accepted (owned by deposit user)
    assert preedit_data['metadata']['communities'] == [{'identifier': 'c3'}]

    # Are files downloadable by everyone (open)?
    assert len(preedit_data['files']) == 3
    download_url = preedit_data['files'][0]['links']['download']
    assert client.get(download_url).status_code == 200

    # Edit record - can now be done immediately after.
    response = client.post(links['edit'], headers=auth_headers)
    assert response.status_code == 201

    # Edit - 2nd time is invalid.
    response = client.post(links['edit'], headers=auth_headers)
    assert response.status_code == 403  # FIXME 400

    # Get data
    data = get_json(client.get(links['self'], headers=auth_headers), code=200)

    # Not allowed to delete
    assert client.delete(
        links['self'], headers=auth_headers).status_code == 403

    # Update metadata
    data = dict(metadata=data['metadata'])
    data['metadata'].update(dict(
        title='New title',
        access_right='closed',
        creators=[
            dict(name="Smith, Jane", affiliation="Atlantis"),
            dict(name="Doe, John", affiliation="Atlantis"),
        ],
        communities=[
            {'identifier': 'c1'}
        ]
    ))
    resdata = get_json(client.put(
        links['self'], data=json.dumps(data), headers=headers
    ), code=200)
    assert resdata['title'] == 'New title'
    assert resdata['metadata']['title'] == 'New title'

    # Try to change DOI
    data['metadata']['doi'] = '10.1234/foo'
    data = get_json(client.put(
        links['self'], data=json.dumps(data), headers=headers
    ), code=400)

    # Approve community
    c = Community.get('c1')
    _, record = resolver.resolve(str(record_id))
    c.accept_record(record)
    record.commit()
    db.session.commit()

    # Get record to confirm if both communities should be visible now
    assert get_json(client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id),
        headers=json_headers,
    ), code=200)['metadata']['communities'] == [
        {'identifier': 'c1'},
        {'identifier': 'c3'},
    ]

    # Publish
    response = client.post(links['publish'], headers=auth_headers)
    data = get_json(response, code=202)
    current_search.flush_and_refresh(index='records')

    # - is record still accessible?
    postedit_data = get_json(client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id),
        headers=json_headers,
    ), code=200)
    # - sanity checks
    assert postedit_data['doi'] == expected_doi
    assert postedit_data['record_id'] == record_id

    # - files should no longer be downloadable (closed access)
    # - download_url worked before edit, so make sure it doesn't work now.
    assert 'files' not in postedit_data
    assert client.get(download_url).status_code == 404
    # - c3 was removed, so only c1 one should be visible now
    assert postedit_data['metadata']['communities'] == [
        {'identifier': 'c1'},
    ]

    # Edit
    data = get_json(client.post(links['edit'], headers=auth_headers),
                    code=201)
    # Update
    data = dict(metadata=data['metadata'])
    data['metadata'].update(dict(title='Will be discarded'))
    resdata = get_json(client.put(
        links['self'], data=json.dumps(data), headers=headers
    ), code=200)
    # Discard
    data = get_json(
        client.post(links['discard'], headers=auth_headers), code=201)
    # Get and assert metadata
    data = get_json(client.get(links['self'], headers=auth_headers), code=200)
    assert data['title'] == postedit_data['title']
def get_community_by_id(self, community_id):
    """Return the community with the given identifier.

    :param community_id: Identifier of the community to fetch.
    :returns: The matching ``Community`` instance
        (presumably ``None`` for an unknown identifier, if
        ``Community.get`` follows the usual convention — TODO confirm
        against invenio_communities).
    """
    # Imported locally to avoid a hard module-level dependency on
    # invenio_communities.
    from invenio_communities.models import Community
    return Community.get(community_id)
def new():
    """Create a new deposit.

    Renders the configured "new deposit" template, optionally scoped to
    the community given by the ``c`` query-string argument.
    """
    # Community is looked up from the optional ``c`` request argument;
    # an absent/unknown value presumably yields ``None`` for the template.
    community_id = request.args.get('c', type=str)
    community = Community.get(community_id)
    template_name = current_app.config['DEPOSIT_UI_NEW_TEMPLATE']
    return render_template(
        template_name,
        record={'_deposit': {'id': None}},
        community=community,
    )
def test_basic_community_workflow(app, db, communities, deposit, deposit_file):
    """Test simple (without concurrent events) deposit publishing workflow."""
    # Publishing without any community request: nothing pending, nothing set.
    deposit = publish_and_expunge(db, deposit)
    assert InclusionRequest.query.count() == 0
    pid, record = deposit.fetch_published()
    assert not record.get('communities', [])

    # Edit the record, request community 'c1' and publish.
    deposit = deposit.edit()
    deposit['communities'] = ['c1']
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    # Only an InclusionRequest should exist; the record itself is untouched.
    assert not record.get('communities', [])
    assert not record['_oai'].get('sets', [])
    assert InclusionRequest.query.count() == 1
    incl_req = InclusionRequest.query.one()
    assert incl_req.id_community == 'c1'
    assert incl_req.id_record == record.id

    # Accepting the record into community 'c1' consumes the request.
    c1 = Community.get('c1')
    c1.accept_record(record)
    record.commit()
    db.session.commit()
    assert InclusionRequest.query.count() == 0
    assert record['communities'] == ['c1']
    assert record['_oai']['sets'] == ['user-c1']

    # Open for edit and additionally request community 'c2'.
    deposit = deposit.edit()
    assert deposit['communities'] == ['c1']
    deposit['communities'] = ['c1', 'c2']
    # New request for community 'c2'.
    deposit = publish_and_expunge(db, deposit)
    deposit['communities'] = ['c1', 'c2']
    pid, record = deposit.fetch_published()
    assert record['communities'] == ['c1']
    assert record['_oai']['sets'] == ['user-c1']
    assert InclusionRequest.query.count() == 1
    incl_req = InclusionRequest.query.one()
    assert incl_req.id_community == 'c2'
    assert incl_req.id_record == record.id

    # Rejecting the 'c2' request: the deposit must drop the obsolete
    # inclusion request on the next edit.
    c2 = Community.get('c2')
    c2.reject_record(record)
    db.session.commit()
    deposit = deposit.edit()
    assert deposit['communities'] == ['c1']
    assert InclusionRequest.query.count() == 0
    pid, record = deposit.fetch_published()
    assert record['communities'] == ['c1']
    assert record['_oai']['sets'] == ['user-c1']

    # Request removal from the previously accepted community 'c1'.
    deposit['communities'] = []
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert not deposit.get('communities', [])
    assert not record.get('communities', [])
    assert not record['_oai'].get('sets', [])
    assert InclusionRequest.query.count() == 0