def recid(pid_value=None):
    """Render the detail view of a record, enforcing read permission.

    Aborts with 404 when the PID cannot be resolved and 403 when the
    current user is not allowed to read the record.
    """
    resolver = Resolver(
        pid_type='recid', object_type='rec', getter=Record.get_record)
    try:
        pid, record = resolver.resolve(pid_value)
    except Exception:
        # BUGFIX: was a bare ``except:`` which also traps SystemExit /
        # KeyboardInterrupt; narrow to Exception while keeping the 404.
        abort(404)
    # The record is public when the 'records-read' action is granted
    # globally (user_id is NULL).
    is_public = ActionUsers.query.filter(
        ActionUsers.action == 'records-read',
        ActionUsers.user_id.is_(None)).first()
    permission_read_record = read_permission_factory(record)
    if is_public or permission_read_record.can():
        # Prefer a collection-specific template, fall back to the
        # generic detail template.
        return record_view(
            pid_value,
            resolver,
            ['records/detail-' + record.get("collections", [""])[0] + '.html',
             'records/detail.html'],
            None,
            default_view_method)
    abort(403)
def run(self, event):
    """Process the circulation event.

    This method builds the frame, fetching the item and calling *_run*
    in a nested transaction.
    """
    resolver = Resolver(pid_type='crcitm', object_type='rec',
                        getter=Item.get_record)
    _, item = resolver.resolve(event.payload['item_id'])
    # Make the resolved item available to the schema during validation.
    self.circulation_event_schema.context['item'] = item
    data, errors = self.circulation_event_schema.load(event.payload)
    if errors:
        # Validation failed: report the errors on the event and stop.
        event.response_code = 400
        event.response = {'message': errors}
        return
    if data.get('dry_run'):
        # Dry runs only validate; nothing is persisted or indexed.
        event.response_code = 204
        return
    # Run the actual event handler inside a savepoint so a failure in
    # ``_run`` or ``commit`` rolls back atomically.
    with db.session.begin_nested():
        data, _ = self.circulation_event_schema.dump(data)
        self._run(item, data)
        item.commit()
    RecordIndexer().index(item)
def prepare_authors_data_for_pushing_to_orcid(json):
    """Extract the authors with valid ORCID credentials.

    Uses the list of authors of a given record in JSON format and
    returns a list of ``(put_code, token, orcid, record_id)`` tuples.
    """
    resolver = Resolver(pid_type='literature', object_type='rec',
                        getter=lambda x: x)
    # With an identity getter ``resolve`` returns ``(pid, value)``; only
    # the PID's object UUID is needed here.
    record_id = resolver.resolve(json.get('control_number'))[
        0].object_uuid
    authors = get_orcid_valid_authors(json)
    token = None
    author_orcid = ''
    authors_with_orcid_credentials = []
    for author in authors:
        try:
            token, author_orcid = get_authors_credentials(author['_source'])
        except AttributeError:
            # No usable credentials for this author; skip it.
            continue
        try:
            # An existing ORCID record provides the put_code to update.
            authors_with_orcid_credentials.append((
                InspireOrcidRecords.query.filter_by(
                    orcid=author_orcid,
                    record_id=record_id).first().put_code,
                token, author_orcid, record_id))
        except AttributeError:
            # ``first()`` returned None: not pushed before, use an empty
            # put_code placeholder.
            authors_with_orcid_credentials.append(
                ([], token, author_orcid, record_id))
            continue
    return authors_with_orcid_credentials
def get_record_by_pid(cls, pid, with_deleted=False):
    """Get ils record by pid value."""
    pid_resolver = Resolver(pid_type=cls._pid_type,
                            object_type="rec",
                            getter=cls.get_record)
    _, resolved_record = pid_resolver.resolve(str(pid))
    return resolved_record
def recid(pid_value=None):
    """Render the detail view of a record, enforcing read permission.

    Aborts with 404 when the PID cannot be resolved and 403 when the
    current user is not allowed to read the record.
    """
    resolver = Resolver(
        pid_type='recid', object_type='rec', getter=Record.get_record)
    try:
        pid, record = resolver.resolve(pid_value)
    except Exception:
        # BUGFIX: was a bare ``except:`` which also traps SystemExit /
        # KeyboardInterrupt; narrow to Exception while keeping the 404.
        abort(404)
    # The record is public when 'records-read' is granted globally
    # (user_id is NULL).
    is_public = ActionUsers.query.filter(
        ActionUsers.action == 'records-read',
        ActionUsers.user_id.is_(None)).first()
    permission_read_record = read_permission_factory(record)
    if is_public or permission_read_record.can():
        return record_view(pid_value,
                           resolver,
                           ['records/detail-'
                            + record.get("collections", [""])[0] + '.html',
                            'records/detail.html'],
                           None,
                           default_view_method)
    abort(403)
def record_permissions(pid_value=None):
    """Return the explicit access permissions of a record as JSON."""
    resolver = Resolver(
        pid_type='recid', object_type='rec', getter=Record.get_record)
    pid, record = resolver.resolve(pid_value)
    permissions = get_record_permissions(record.id)
    result = dict()
    result['permissions'] = []
    collab_egroups = current_app.config.get('CAP_COLLAB_EGROUPS')
    if record.get('experiment', None):
        # Expose the first collaboration e-group configured for the
        # record's experiment.
        result['collab_egroup'] = six.next(
            six.itervalues(collab_egroups.get(record['experiment']))
        )[0]
    for p in permissions:
        # User grants and role grants are reported in the same shape;
        # for roles the role name stands in for the email.
        if isinstance(p, ActionUsers) and p.user:
            result['permissions'].append(
                {"action": p.action, "user": {"email": p.user.email}}
            )
        elif isinstance(p, ActionRoles) and p.role:
            result['permissions'].append(
                {"action": p.action, "user": {"email": p.role.name}}
            )
    resp = jsonify(**result)
    resp.status_code = 200
    return resp
def get_record_by_data(cls, data):
    """Resolve a record from harvested *data* via its OAI PID.

    Returns the record, or None when the PID does not exist, points to a
    deleted record, or resolution fails for any other reason (in which
    case the stale PID is cleaned up).
    """
    # Depending on the providers this method can be more complex,
    # meaning using other external PIDs like url or doi.
    assert cls.oai_provider
    resolver = Resolver(
        pid_type=cls.oai_provider.pid_type,
        object_type=cls.object_type,
        getter=cls.get_record,
    )
    try:
        pid = cls.oai_provider.get_pid_from_data(data=data)
        try:
            persistent_identifier, record = resolver.resolve(str(pid))
            return record
        except PIDDeletedError:
            # The record was deleted: drop the dangling PID row.
            PersistentIdentifier.query.filter_by(
                pid_type=pids.RECORD_SOURCE_OAI_PID_TYPE,
                pid_value=str(pid)).delete()
            db.session.commit()
            return None
        except Exception:
            print(traceback.format_exc())
            # Reset the PID so a later harvest can re-register it.
            persistent_identifier = PersistentIdentifier.get(
                pids.RECORD_SOURCE_OAI_PID_TYPE, str(pid))
            persistent_identifier.unassign()
            # BUGFIX: this line used ``==`` (a no-op comparison); the
            # intent is to reset the status before deletion.
            persistent_identifier.status = PIDStatus.NEW
            persistent_identifier.delete()
            db.session.commit()
            return None
    except PIDDoesNotExistError:
        return None
def prepare_authors_data_for_pushing_to_orcid(data):
    """Extract the authors with valid ORCID credentials.

    It uses the list of authors from a given record in JSON format and
    returns ``(put_code, token, orcid, record_id)`` tuples.
    """
    pid_type = current_app.config['ORCID_RECORDS_PID_TYPE']
    resolver = Resolver(pid_type=pid_type, object_type='rec',
                        getter=lambda x: x)
    fetcher_name = current_app.config['ORCID_RECORDS_PID_FETCHER']
    pid = current_pidstore.fetchers[fetcher_name](None, data)
    record_identifier = pid.pid_value
    # Identity getter: ``resolve`` yields ``(pid, value)``; only the
    # PID's object UUID is needed here.
    record_id = resolver.resolve(record_identifier)[0].object_uuid
    authors = get_orcid_valid_authors(data)
    token = None
    author_orcid = ''
    authors_with_orcid_credentials = []
    for author in authors:
        try:
            token, author_orcid = get_authors_credentials(author)
        except AttributeError:
            # No usable credentials for this author; skip it.
            continue
        try:
            # An existing ORCID record provides the put_code to update.
            authors_with_orcid_credentials.append((
                ORCIDRecords.query.filter_by(
                    orcid=author_orcid, record_id=record_id
                ).first().put_code,
                token, author_orcid, record_id
            ))
        except AttributeError:
            # ``first()`` returned None: not pushed before, use an empty
            # put_code placeholder.
            authors_with_orcid_credentials.append(
                ([], token, author_orcid, record_id))
            continue
    return authors_with_orcid_credentials
def prepare_authors_data_for_pushing_to_orcid(json):
    """Extract authors holding valid ORCID credentials from a record.

    Returns a list of ``(put_code, token, orcid, record_id)`` tuples
    for the given record in JSON format.
    """
    resolver = Resolver(pid_type='literature', object_type='rec',
                        getter=lambda x: x)
    # Identity getter: resolve() returns (pid, value); we need the UUID.
    literature_pid, _ = resolver.resolve(json.get('control_number'))
    record_id = literature_pid.object_uuid
    authors = get_orcid_valid_authors(json)
    token = None
    author_orcid = ''
    prepared = []
    for author in authors:
        try:
            token, author_orcid = get_authors_credentials(author)
        except AttributeError:
            # Author carries no usable credentials.
            continue
        existing = InspireOrcidRecords.query.filter_by(
            orcid=author_orcid, record_id=record_id).first()
        try:
            put_code = existing.put_code
        except AttributeError:
            # No previous push to ORCID: empty put_code placeholder.
            put_code = []
        prepared.append((put_code, token, author_orcid, record_id))
    return prepared
def test_record_files_migration(app, location, script_info, datadir):
    """Test CDS records and files migrations."""
    runner = CliRunner()
    filepath = join(datadir, 'cds_records_and_files_dump.json')
    result = runner.invoke(
        cli, ['dumps', 'loadrecords', filepath], obj=script_info)
    assert result.exit_code == 0
    assert RecordMetadata.query.count() == 1
    # CERN Theses
    resolver = Resolver(
        pid_type='recid', object_type='rec', getter=Record.get_record)
    pid, record = resolver.resolve(1198695)
    assert record
    # 33 from the dump and 1 for the files
    assert record.revision_id == 34
    assert record['main_entry_personal_name']['personal_name'] == 'Caffaro, J'
    assert 'CERN' in record['subject_indicator']
    assert record['control_number'] == '1198695'
    assert record['title_statement']['title'] == \
        'Improving the Formatting Tools of CDS Invenio'
    assert record['source_of_acquisition'][0]['stock_number'] == \
        'CERN-THESIS-2009-057'
    # The attached files must have been migrated with the metadata.
    assert '_files' in record
    assert record['_files'][0]['key'] == 'CERN-THESIS-2009-057.pdf'
    assert record['_files'][0]['doctype'] == 'CTH_FILE'
def create_or_update_record(data, pid_type, id_key, minter):
    """Register a funder or grant.

    Creates the record when its PID is unknown; otherwise updates and
    re-indexes it only when the metadata actually differs.
    """
    resolver = Resolver(
        pid_type=pid_type, object_type='rec', getter=Record.get_record)
    try:
        pid, record = resolver.resolve(data[id_key])
        data_c = deepcopy(data)
        del data_c['remote_modified']
        # BUGFIX: the comparison copy must come from the stored record,
        # not from ``data`` again — the original copied ``data`` twice,
        # so the two sides were always equal and updates never applied.
        record_c = deepcopy(record)
        del record_c['remote_modified']
        # All grants on OpenAIRE are modified periodically even if
        # nothing has changed. We need to check for actual differences
        # in the metadata.
        if data_c != record_c:
            record.update(data)
            record.commit()
            record_id = record.id
            db.session.commit()
            RecordIndexer().index_by_id(str(record_id))
    except PIDDoesNotExistError:
        record = Record.create(data)
        record_id = record.id
        minter(record.id, data)
        db.session.commit()
        RecordIndexer().index_by_id(str(record_id))
def create_or_update_record(data, pid_type, id_key, minter):
    """Register a funder or grant."""
    resolver = Resolver(pid_type=pid_type, object_type='rec',
                        getter=Record.get_record)
    try:
        _, record = resolver.resolve(data[id_key])
        incoming = deepcopy(data)
        stored = deepcopy(record)
        del incoming['remote_modified']
        del stored['remote_modified']
        # All grants on OpenAIRE are modified periodically even if
        # nothing has changed, so only touch the record when the
        # metadata (modification stamp aside) really differs.
        if incoming != stored:
            record.update(data)
            record.commit()
            rec_uuid = record.id
            db.session.commit()
            RecordIndexer().index_by_id(str(rec_uuid))
    except PIDDoesNotExistError:
        # First sighting: create, mint a PID and index the new record.
        record = Record.create(data)
        rec_uuid = record.id
        minter(record.id, data)
        db.session.commit()
        RecordIndexer().index_by_id(str(rec_uuid))
def test_deposit_load_task(dummy_location, deposit_dump, deposit_user, deposit_record_pid): """Test the deposit loading task.""" # Create a user and a record with PID corresponding with test deposit data assert RecordMetadata.query.count() == 1 for dep in deposit_dump: load_deposit(dep) assert RecordMetadata.query.count() == 2 res = Resolver(pid_type="depid", object_type="rec", getter=Record.get_record) dep_pid, dep_rec = res.resolve("1") assert "_deposit" in dep_rec assert "_files" in dep_rec sip = SIP.query.one() assert sip.user_id == deposit_user.id rsip = RecordSIP.query.one() assert rsip.pid_id == deposit_record_pid.id assert rsip.sip_id == sip.id # Test RecordsFiles API res = Resolver(pid_type="depid", object_type="rec", getter=RecordFiles.get_record) dep_pid, dep_recbucket = res.resolve("1") files = list(dep_recbucket.files) assert files[0]["key"] == "bazbar.pdf" assert files[0]["size"] == 12345 assert files[0]["checksum"] == "md5:00000000000000000000000000000000" assert files[0]["bucket"] assert SIPFile.query.count() == 1
def test_deposit_load_task(dummy_location, deposit_dump, deposit_user, deposit_record_pid): """Test the deposit loading task.""" # Create a user and a record with PID corresponding with test deposit data assert RecordMetadata.query.count() == 1 for dep in deposit_dump: load_deposit.delay(dep) assert RecordMetadata.query.count() == 2 res = Resolver(pid_type='depid', object_type='rec', getter=Record.get_record) dep_pid, dep_rec = res.resolve('1') assert '_deposit' in dep_rec assert '_files' in dep_rec sip = SIP.query.one() assert sip.user_id == deposit_user.id rsip = RecordSIP.query.one() assert rsip.pid_id == deposit_record_pid.id assert rsip.sip_id == sip.id # Test RecordsFiles API res = Resolver(pid_type='depid', object_type='rec', getter=RecordFiles.get_record) dep_pid, dep_recbucket = res.resolve('1') files = list(dep_recbucket.files) assert files[0]['key'] == 'bazbar.pdf' assert files[0]['size'] == 12345 assert files[0]['checksum'] == "00000000000000000000000000000000" assert files[0]['bucket'] assert SIPFile.query.count() == 1
def create_reana_workflow():
    """Create a reana workflow by json."""
    _args = request.get_json()

    # try fetch the deposit with the provided PID
    try:
        resolver = Resolver(pid_type='depid', object_type='rec',
                            getter=lambda x: x)
        # NOTE(review): with an identity getter ``deposit`` is the PID
        # object and ``rec_uuid`` the bare object UUID — confirm that
        # UpdateDepositPermission accepts the PID here.
        deposit, rec_uuid = resolver.resolve(_args.get('pid'))
    except PIDDoesNotExistError:
        abort(404, "You tried to create a workflow and connect"
                   " it with a non-existing record")

    # if record exist check if the user has 'deposit-update' rights
    with UpdateDepositPermission(deposit).require(403):
        token = get_reana_token(rec_uuid)
        name = _args.get('workflow_name')
        # Server-side generated, human-friendly run name.
        workflow_name = generate_slug(2)
        workflow_json = _args.get('workflow_json')
        try:
            resp = create_workflow(workflow_json, workflow_name, token)
        except ValidationError as e:
            return jsonify({'message': e.message}), 400
        except Exception:
            return jsonify({
                'message': 'An exception has occured while creating '
                           'the workflow in REANA.'
            }), 400

        # create a workflow dict, which can be used to populate
        # the db, but also used in the serializer
        _workflow = {
            'service': 'reana',
            'user_id': current_user.id,
            'name': name,
            'workflow_name': workflow_name,
            'name_run': resp['workflow_name'],
            'workflow_id': resp['workflow_id'],
            'rec_uuid': str(rec_uuid),
            'depid': _args.get('pid'),
            'status': 'created',
            'workflow_json': workflow_json,
        }

        # TOFIX: check for integrity errors
        workflow = ReanaWorkflow(**_workflow)
        db.session.add(workflow)
        db.session.commit()
        workflow_serialized = ReanaWorkflowSchema().dump(_workflow).data
        return jsonify(workflow_serialized)
def get_record_by_pid(cls, pid, with_deleted=False):
    """Get ils record by pid value."""
    loan_resolver = Resolver(
        pid_type=CIRCULATION_LOAN_PID_TYPE,
        object_type="rec",
        getter=cls.get_record,
    )
    _, loan_record = loan_resolver.resolve(str(pid))
    return loan_record
def source_resolver(pid):
    """Resolve a referenced source record and strip its ``$schema``."""
    src_resolver = Resolver(pid_type='srcid', object_type="src",
                            getter=Record.get_record)
    _, resolved = src_resolver.resolve(pid)
    del resolved['$schema']
    return resolved
def resolve_depid(depid):
    """Resolve a deposit PID value.

    With the identity getter used here, the resolver returns a
    ``(pid, object_uuid)`` pair rather than a loaded record.
    """
    resolver = Resolver(pid_type='depid', object_type='rec',
                        getter=lambda x: x)
    # Removed dead commented-out code that duplicated this call and an
    # unrelated ReanaWorkflow query.
    return resolver.resolve(depid)
def get_record_by_legacy_recid(cls, legacy_pid_type, pid_value):
    """Get ils record by pid value and pid type."""
    legacy_resolver = Resolver(pid_type=legacy_pid_type,
                               object_type="rec",
                               getter=cls.get_record)
    _, resolved_record = legacy_resolver.resolve(str(pid_value))
    return resolved_record
def curate(community):
    """Index page with uploader and list of existing depositions.

    :param community_id: ID of the community to curate.
    """
    if request.method == 'POST':
        action = request.json.get('action')
        recid = request.json.get('recid')

        # 'recid' is mandatory
        if not recid:
            return jsonify({'status': 'danger', 'msg': _('Unknown record')})
        if action not in ['accept', 'reject', 'remove']:
            return jsonify({'status': 'danger', 'msg': _('Unknown action')})

        # Resolve recid to a Record
        resolver = Resolver(
            pid_type='recid', object_type='rec', getter=Record.get_record)
        pid, record = resolver.resolve(recid)

        action_name = ""
        status = "success"
        # Perform actions
        try:
            if action == "accept":
                community.accept_record(record)
                action_name = "added to"
            elif action == "reject":
                community.reject_record(record)
                action_name = "rejected from"
                status = "info"
            elif action == "remove":
                community.remove_record(record)
                action_name = "removed from"
                status = "info"
        except CommunitiesError:
            # The record is no longer pending for this community.
            return jsonify({
                'status': 'danger',
                'msg': _('record not in the curation list,'
                         ' please refresh the page.')})
        record.commit()
        db.session.commit()
        RecordIndexer().index_by_id(record.id)
        # Build a user-facing confirmation message with the record title.
        title = ""
        if "title_statement" in record \
                and "title" in record["title_statement"]:
            title = record["title_statement"]["title"]
        message = _('The record '
                    '"{}" has been {} the community.').format(title,
                                                              action_name)
        return jsonify({'status': status, 'msg': message})

    ctx = {'community': community}
    return render_template(
        current_app.config['COMMUNITIES_CURATE_TEMPLATE'], **ctx
    )
def get_record_by_pid(cls, pid, with_deleted=False):
    """Get ils record by pid value."""
    resolver = Resolver(pid_type=CIRCULATION_LOAN_PID_TYPE,
                        object_type="rec",
                        getter=cls.get_record)
    resolved = resolver.resolve(str(pid))
    # resolve() returns (pid, record); only the record is needed.
    return resolved[1]
def institution_resolver(pid):
    """Resolve a referenced institution and strip its ``$schema``."""
    inst_resolver = Resolver(pid_type='inst', object_type="rec",
                             getter=Record.get_record)
    _, inst_record = inst_resolver.resolve(pid)
    del inst_record['$schema']
    return inst_record
def user_resolver(pid):
    """Resolve a referenced user record and strip its ``$schema``."""
    usr_resolver = Resolver(pid_type='user', object_type="rec",
                            getter=Record.get_record)
    _, usr_record = usr_resolver.resolve(pid)
    del usr_record['$schema']
    return usr_record
def fetch_published(self):
    """Return a tuple with PID and published record."""
    deposit_pid = self['_deposit']['pid']
    resolver = Resolver(
        pid_type=deposit_pid['type'],
        object_type='rec',
        # Deleted records must still resolve for deposits.
        getter=partial(Record.get_record, with_deleted=True))
    return resolver.resolve(deposit_pid['value'])
def get_record_by_pid(cls, pid, with_deleted=False):
    """Get ils record by pid value."""
    # Local import avoids a circular dependency with the config module.
    from .config import _CIRCULATION_LOAN_PID_TYPE
    loan_resolver = Resolver(
        pid_type=_CIRCULATION_LOAN_PID_TYPE,
        object_type='rec',
        getter=cls.get_record,
    )
    _, loan_record = loan_resolver.resolve(str(pid))
    return loan_record
def record_jsonresolver(authid):
    """Resolve a referenced author record by its id."""
    # Resolver retrieving an author record given its id.
    author_resolver = Resolver(pid_type='authid', object_type="rec",
                               getter=Record.get_record)
    _, author_record = author_resolver.resolve(str(authid))
    # The record could be reshaped here (fields added/removed) before
    # being returned; for now only the schema URL is dropped.
    del author_record['$schema']
    return author_record
def get_record_by_pid(cls, pid, with_deleted=False):
    """Get ils record by pid value."""
    assert cls.provider
    resolver = Resolver(pid_type=cls.provider.pid_type,
                        object_type=cls.object_type,
                        getter=cls.get_record)
    pid_obj, _ = resolver.resolve(str(pid))
    # Reload through the parent class so ``with_deleted`` is honoured.
    return super(IlsRecord, cls).get_record(pid_obj.object_uuid,
                                            with_deleted=with_deleted)
def fetch_published(self):
    """Return a tuple with PID and published record."""
    published_pid = self['_deposit']['pid']
    # Deleted records must remain resolvable from a deposit.
    getter = partial(Record.get_record, with_deleted=True)
    resolver = Resolver(pid_type=published_pid['type'],
                        object_type='rec',
                        getter=getter)
    return resolver.resolve(published_pid['value'])
def get_record_by_legacy_recid(cls, pid_value):
    """Get ils record by pid value and pid type."""
    # The legacy PID type is configured application-wide.
    legacy_pid_type = current_app.config["CDS_ILS_RECORD_LEGACY_PID_TYPE"]
    legacy_resolver = Resolver(pid_type=legacy_pid_type,
                               object_type="rec",
                               getter=cls.get_record)
    _, legacy_record = legacy_resolver.resolve(str(pid_value))
    return legacy_record
def test_permission(app):
    """Test permission control to records."""
    app.config.update(
        WTF_CSRF_ENABLED=False,
        SECRET_KEY='CHANGEME',
        SECURITY_PASSWORD_SALT='CHANGEME',
        # conftest switches off permission checking, so re-enable it for
        # this app.
        RECORDS_UI_DEFAULT_PERMISSION_FACTORY='helpers:'
        'only_authenticated_users',
    )
    Menu(app)
    InvenioRecordsUI(app)
    accounts = InvenioAccounts(app)
    app.register_blueprint(accounts_blueprint)
    InvenioAccess(app)
    setup_record_fixture(app)

    # Create admin
    with app.app_context():
        accounts.datastore.create_user(
            email='*****@*****.**',
            password=encrypt_password('123456'),
            active=True,
        )
        # Get record 1
        r = Resolver(pid_type='recid', object_type='rec',
                     getter=Record.get_record)
        dummy_pid, record = r.resolve('1')
        db.session.commit()

    with app.test_request_context():
        login_url = url_for('security.login')
        record_url = url_for('invenio_records_ui.recid', pid_value='1')

    # Access record 1 as admin
    with app.test_client() as client:
        res = client.get(record_url)
        assert res.status_code == 302
        res = client.post(login_url, data={
            'email': '*****@*****.**',
            'password': '******'
        })
        assert res.status_code == 302
        res = client.get(record_url)
        # BUGFIX: this was a bare comparison with no effect (B015);
        # assert that an authenticated user can read the record.
        assert res.status_code == 200

    # Access record 1 as anonymous
    with app.test_client() as client:
        res = client.get(record_url)
        # BUGFIX: bare comparison again. The identical anonymous request
        # earlier in this test is asserted to redirect (302), so assert
        # the same here; the original no-op compared against 403.
        assert res.status_code == 302
def get_record_by_pid(cls, pid, with_deleted=False):
    """Get ils record by pid value."""
    # Removed an unused local import of ``_DOCUMENT_PID_TYPE``; the
    # resolver is driven by ``cls._pid_type`` instead.
    resolver = Resolver(
        pid_type=cls._pid_type,
        object_type="rec",
        getter=cls.get_record,
    )
    _, record = resolver.resolve(str(pid))
    return record
def resolve(record_type, pid_value):
    """Resolve a pid value for a given record type."""
    # Look up the PID type from the REST endpoint configuration.
    endpoints = current_app.config['RECORDS_REST_ENDPOINTS']
    pid_type = endpoints.get(record_type, {}).get('pid_type')
    # The editor options may provide a specialized record class.
    editor_opts = current_app.config[
        'REROILS_RECORD_EDITOR_OPTIONS'].get(record_type)
    record_cls = editor_opts.get('record_class', Record)
    return Resolver(pid_type=pid_type,
                    object_type='rec',
                    getter=record_cls.get_record).resolve(pid_value)
def organisation_resolver(pid):
    """Resolve a referenced organisation, stripping ``$schema`` if set."""
    org_resolver = Resolver(pid_type='org', object_type="rec",
                            getter=Record.get_record)
    _, org_record = org_resolver.resolve(pid)
    if org_record.get('$schema'):
        del org_record['$schema']
    return org_record
def get_record_by_id(recid):
    """Fetch a record by recid, printing and returning None on failure."""
    try:
        pid, record = Resolver(pid_type='recid', object_type='rec',
                               getter=Record.get_record).resolve(recid)
    except NoResultFound:
        print('No record found for recid {}'.format(recid))
        return None
    except PIDDoesNotExistError:
        print('The PID {0} does not exist'.format(recid))
        return None
    return record
def get_record_by_id(recid):
    """Fetch a record by recid, logging and returning None on failure."""
    resolver = Resolver(pid_type='recid', object_type='rec',
                        getter=Record.get_record)
    try:
        _, record = resolver.resolve(recid)
    except NoResultFound:
        current_app.logger.exception(
            'No record found for recid {}'.format(recid))
        return None
    except PIDDoesNotExistError:
        current_app.logger.exception(
            'The PID {0} does not exist'.format(recid))
        return None
    return record
def project_resolver(pid):
    """Resolve a referenced project, stripping ``$schema`` if set."""
    proj_resolver = Resolver(pid_type='proj', object_type="rec",
                             getter=Record.get_record)
    _, proj_record = proj_resolver.resolve(pid)
    if proj_record.get('$schema'):
        del proj_record['$schema']
    return proj_record
def get_record_pid_uuid(app, users, create_deposit, create_schema):
    """Create a test deposit and return its ``(pid, uuid)`` pair."""
    owner = users['cms_user']
    create_schema('deposits/records/test-v0.0.1', experiment='CMS')
    deposit = create_deposit(owner, 'test-v0.0.1')
    pid = deposit['_deposit']['id']
    # Identity getter: resolve() yields (pid, object_uuid).
    depid_resolver = Resolver(pid_type='depid', object_type='rec',
                              getter=lambda x: x)
    _, uuid = depid_resolver.resolve(pid)
    return pid, str(uuid)
def fundertree2json(self, tree, oai_id):
    """Convert OpenAIRE's funder XML to JSON."""
    try:
        tree = self.get_subtree(tree, 'fundingtree')[0]
    except IndexError:  # pragma: nocover
        pass
    funder_node = self.get_subtree(tree, 'funder')
    subfunder_node = self.get_subtree(tree, '//funding_level_0')
    funder_id = self.get_text_node(funder_node[0], './id') \
        if funder_node else None
    subfunder_id = self.get_text_node(subfunder_node[0], './id') \
        if subfunder_node else None
    funder_name = self.get_text_node(funder_node[0], './shortname') \
        if funder_node else ""
    subfunder_name = self.get_text_node(subfunder_node[0], './name') \
        if subfunder_node else ""
    # Try to resolve the subfunder first, on failure try to resolve the
    # main funder, on failure raise an error.
    funder_doi_url = None
    if subfunder_id:
        funder_doi_url = self.funder_resolver.resolve_by_id(subfunder_id)
    if not funder_doi_url:
        if funder_id:
            funder_doi_url = self.funder_resolver.resolve_by_id(funder_id)
    if not funder_doi_url:
        funder_doi_url = self.funder_resolver.resolve_by_oai_id(oai_id)
    if not funder_doi_url:
        raise FunderNotFoundError(oai_id, funder_id, subfunder_id)
    funder_doi = FundRefDOIResolver.strip_doi_host(funder_doi_url)
    if not funder_name:
        # Grab name from FundRef record.
        resolver = Resolver(pid_type='frdoi', object_type='rec',
                            getter=Record.get_record)
        try:
            dummy_pid, funder_rec = resolver.resolve(funder_doi)
            funder_name = funder_rec['acronyms'][0]
        except PersistentIdentifierError:
            # NOTE(review): message lacks a space between "to" and
            # "loading" — runtime string left untouched here.
            raise OAIRELoadingError(
                "Please ensure that funders have been loaded prior to"
                "loading grants. Could not resolve funder {0}".format(
                    funder_doi))
    return dict(
        doi=funder_doi,
        url=funder_doi_url,
        name=funder_name,
        program=subfunder_name,
    )
def get_record_by_pid(cls, pid):
    """Get ils record by pid value."""
    assert cls.provider
    auth_resolver = Resolver(pid_type=cls.provider.pid_type,
                             object_type=cls.object_type,
                             getter=cls.get_record)
    try:
        pid_obj, _ = auth_resolver.resolve(str(pid))
        # Reload through the parent class by UUID.
        return super(AuthRecord, cls).get_record(pid_obj.object_uuid)
    except PIDDoesNotExistError:
        # Unknown PID: signal "not found" with None.
        return None
def can_user_review(self, obj):
    """Return True when the deposit behind *obj* can be reviewed."""
    deposit_pid = obj.get("metadata", {}).get("_deposit", {}).get("id")
    depid_resolver = Resolver(pid_type='depid', object_type='rec',
                              getter=lambda x: x)
    _, rec_uuid = depid_resolver.resolve(deposit_pid)
    deposit = CAPDeposit.get_record(rec_uuid)
    # Both conditions must hold: a reviewable schema AND review rights.
    if not deposit.schema_is_reviewable():
        return False
    return ReviewDepositPermission(deposit).can()
def fundertree2json(self, tree, oai_id):
    """Convert OpenAIRE's funder XML to JSON."""
    try:
        tree = self.get_subtree(tree, 'fundingtree')[0]
    except IndexError:  # pragma: nocover
        pass
    funder_node = self.get_subtree(tree, 'funder')
    subfunder_node = self.get_subtree(tree, '//funding_level_0')
    funder_id = self.get_text_node(funder_node[0], './id') \
        if funder_node else None
    subfunder_id = self.get_text_node(subfunder_node[0], './id') \
        if subfunder_node else None
    funder_name = self.get_text_node(funder_node[0], './shortname') \
        if funder_node else ""
    subfunder_name = self.get_text_node(subfunder_node[0], './name') \
        if subfunder_node else ""
    # Try to resolve the subfunder first, on failure try to resolve the
    # main funder, on failure raise an error.
    funder_doi_url = None
    if subfunder_id:
        funder_doi_url = self.funder_resolver.resolve_by_id(subfunder_id)
    if not funder_doi_url:
        if funder_id:
            funder_doi_url = self.funder_resolver.resolve_by_id(funder_id)
    if not funder_doi_url:
        funder_doi_url = self.funder_resolver.resolve_by_oai_id(oai_id)
    if not funder_doi_url:
        raise FunderNotFoundError(oai_id, funder_id, subfunder_id)
    funder_doi = FundRefDOIResolver.strip_doi_host(funder_doi_url)
    if not funder_name:
        # Grab name from FundRef record.
        resolver = Resolver(
            pid_type='frdoi', object_type='rec', getter=Record.get_record)
        try:
            dummy_pid, funder_rec = resolver.resolve(funder_doi)
            funder_name = funder_rec['acronyms'][0]
        except PersistentIdentifierError:
            # NOTE(review): message lacks a space between "to" and
            # "loading" — runtime string left untouched here.
            raise OAIRELoadingError(
                "Please ensure that funders have been loaded prior to"
                "loading grants. Could not resolve funder {0}".format(
                    funder_doi))
    return dict(
        doi=funder_doi,
        url=funder_doi_url,
        name=funder_name,
        program=subfunder_name,
    )
def test_permission(app):
    """Test permission control to records."""
    app.config.update(
        WTF_CSRF_ENABLED=False,
        SECRET_KEY='CHANGEME',
        SECURITY_PASSWORD_SALT='CHANGEME',
        # conftest switches off permission checking, so re-enable it for
        # this app.
        RECORDS_UI_DEFAULT_PERMISSION_FACTORY='helpers:'
        'only_authenticated_users',
    )
    Menu(app)
    InvenioRecordsUI(app)
    accounts = InvenioAccounts(app)
    app.register_blueprint(accounts_blueprint)
    InvenioAccess(app)
    setup_record_fixture(app)

    # Create admin
    with app.app_context():
        accounts.datastore.create_user(
            email='*****@*****.**',
            password=encrypt_password('123456'),
            active=True,
        )
        # Get record 1
        r = Resolver(pid_type='recid', object_type='rec',
                     getter=Record.get_record)
        dummy_pid, record = r.resolve('1')
        db.session.commit()

    with app.test_request_context():
        login_url = url_for('security.login')
        record_url = url_for('invenio_records_ui.recid', pid_value='1')

    # Access record 1 as admin
    with app.test_client() as client:
        res = client.get(record_url)
        assert res.status_code == 302
        res = client.post(login_url, data={
            'email': '*****@*****.**',
            'password': '******'})
        assert res.status_code == 302
        res = client.get(record_url)
        # BUGFIX: this was a bare comparison with no effect (B015);
        # assert that an authenticated user can read the record.
        assert res.status_code == 200

    # Access record 1 as anonymous
    with app.test_client() as client:
        res = client.get(record_url)
        # BUGFIX: bare comparison again. The identical anonymous request
        # earlier in this test is asserted to redirect (302), so assert
        # the same here; the original no-op compared against 403.
        assert res.status_code == 302
def create_or_update_record(data, pid_type, id_key, minter):
    """Register a funder or grant."""
    resolver = Resolver(
        pid_type=pid_type, object_type='rec', getter=Record.get_record)
    try:
        _, record = resolver.resolve(data[id_key])
        # Only write when the upstream modification stamp changed.
        if data['remote_modified'] != record['remote_modified']:
            record.update(data)
            record.commit()
            db.session.commit()
    except PIDDoesNotExistError:
        # Unknown PID: create the record and mint its identifier.
        new_record = Record.create(data)
        minter(new_record.id, data)
        db.session.commit()
def delete_from_orcid(sender, api=None):
    """Delete a record from orcid."""
    api = api or current_orcid.member
    resolver = Resolver(pid_type='literature', object_type='rec',
                        getter=lambda x: x)
    # Identity getter: ``resolve`` returns ``(pid, value)``; grab the
    # record UUID from the PID.
    record_id = resolver.resolve(sender.get('control_number'))[
        0].object_uuid
    records = InspireOrcidRecords.query.filter_by(record_id=record_id).all()
    for record in records:
        # Map the ORCID identity back to a local user to obtain a token.
        raw_user = UserIdentity.query.filter_by(
            id=record.orcid, method='orcid').first()
        user = RemoteAccount.query.filter_by(user_id=raw_user.id_user).first()
        token = user.tokens[0].access_token
        api.remove_record(record.orcid, token, 'work', record.put_code)
        # Drop the local bookkeeping row inside a savepoint.
        with db.session.begin_nested():
            db.session.delete(record)
    db.session.commit()
def update_record(pid_value=None):
    """Apply a JSON-Patch to a record's metadata.

    Returns 200 on success, 400 on a malformed patch or validation
    error, 404 when the record is missing or the user may not edit it.
    """
    resolver = Resolver(
        pid_type='recid', object_type='rec', getter=Record.get_record)
    try:
        pid, record = resolver.resolve(pid_value)
    except Exception:
        # BUGFIX: was a bare ``except:`` which also traps SystemExit /
        # KeyboardInterrupt; narrow to Exception while keeping the 404.
        abort(404)
    permission_edit_record = update_permission_factory(record)
    if not permission_edit_record.can():
        abort(404)
    try:
        # Removed leftover debug ``print`` of the raw request body.
        _metadata_patch = request.get_data()
        prepare_patch = json.loads(_metadata_patch)
        # Re-anchor every patch operation under the metadata subtree.
        for m in prepare_patch:
            m["path"] = JSON_METADATA_PATH + m.get("path", "")
        record = record.patch(patch=prepare_patch)
    except (JsonPatchException, JsonPointerException):
        db.session.rollback()
        abort(400)
    try:
        record.commit()
    except ValidationError as error:
        # Removed leftover debug prints; the message is returned to the
        # client instead.
        db.session.rollback()
        resp = jsonify(**{'message': error.message})
        resp.status_code = 400
        return resp
    db.session.commit()
    resp = jsonify()
    resp.status_code = 200
    return resp
def curate(community_id): """Index page with uploader and list of existing depositions. :param community_id: ID of the community to curate. """ # Does community exists u = Community.query.filter_by(id=community_id).first_or_404() if request.method == 'POST': recid = request.json.get('recid', '') # PID value of type 'recid' # 'recid' is mandatory if not recid: abort(400) resolver = Resolver(pid_type='recid', object_type='rec', getter=Record.get_record) pid, record = resolver.resolve(recid) # Resolve recid to a Record action = request.json.get('action') # Check allowed actions and required permissions if action in ['accept', 'reject']: if u.id_user != current_user.id: abort(403) elif action == 'remove': if u.id_user != current_user.id: abort(403) else: # action not in ['accept', 'reject', 'remove'] abort(400) # Perform actions if action == "accept": u.accept_record(record) return jsonify({'status': 'success'}) elif action == "reject": u.reject_record(record) return jsonify({'status': 'success'}) else: # action == "remove" u.remove_record(record) return jsonify({'status': 'success'}) ctx = {'community': u} return render_template('invenio_communities/curate.html', **ctx)
def __init__(self, data, source_type='marcxml', latest_only=False,
             pid_fetchers=None, dojson_model=marc21):
    """Initialize class.

    :param data: dump data to process.
    :param source_type: format of ``data`` (default ``'marcxml'``).
    :param latest_only: presumably restricts processing to the latest
        revision — TODO confirm from usage.
    :param pid_fetchers: optional list of PID fetchers (defaults to an
        empty list).
    :param dojson_model: dojson model used to translate the source data.
    """
    # Resolver used to look up already-existing records by recid.
    self.resolver = Resolver(
        pid_type='recid', object_type='rec', getter=Record.get_record)
    self.data = data
    self.source_type = source_type
    self.latest_only = latest_only
    self.dojson_model = dojson_model
    # Populated later during processing.
    self.revisions = None
    self.pid_fetchers = pid_fetchers or []
def delete_from_orcid(sender, api=None):
    """Delete a record from orcid."""
    api = api or current_orcid.member
    pid_type = current_app.config['ORCID_RECORDS_PID_TYPE']
    resolver = Resolver(pid_type=pid_type, object_type='rec',
                        getter=lambda x: x)
    fetcher_name = current_app.config['ORCID_RECORDS_PID_FETCHER']
    pid = current_pidstore.fetchers[fetcher_name](None, sender)
    record_identifier = pid.pid_value
    # Identity getter: ``resolve`` yields ``(pid, value)``; only the
    # PID's object UUID is needed.
    record_id = resolver.resolve(record_identifier)[0].object_uuid
    records = ORCIDRecords.query.filter_by(record_id=record_id).all()
    for record in records:
        # Map the ORCID identity back to a local user to obtain a token.
        raw_user = UserIdentity.query.filter_by(
            id=record.orcid, method='orcid').first()
        user = RemoteAccount.query.filter_by(user_id=raw_user.id_user).first()
        token = user.remote_tokens[0].access_token
        api.remove_record(record.orcid, token, 'work', record.put_code)
        # Drop the local bookkeeping row inside a savepoint.
        with db.session.begin_nested():
            db.session.delete(record)
    db.session.commit()
def edit_record(pid_value=None):
    """Render the edit view for a record, enforcing update permission.

    Aborts with 404 when the PID cannot be resolved and 403 when the
    current user may not edit the record.
    """
    resolver = Resolver(
        pid_type='recid', object_type='rec', getter=Record.get_record)
    try:
        pid, record = resolver.resolve(pid_value)
    except Exception:
        # BUGFIX: was a bare ``except:`` which also traps SystemExit /
        # KeyboardInterrupt; narrow to Exception while keeping the 404.
        abort(404)
    permission_edit_record = update_permission_factory(record)
    if permission_edit_record.can():
        return record_view(pid_value,
                           resolver,
                           'records/edit.html',
                           None,
                           default_view_method)
    abort(403)
def curate(community):
    """Index page with uploader and list of existing depositions.

    :param community_id: ID of the community to curate.
    """
    if request.method == 'POST':
        payload = request.json
        action = payload.get('action')
        recid = payload.get('recid')
        # 'recid' is mandatory
        if not recid:
            abort(400)
        # Dispatch table replaces the if/elif chain; unknown actions -> 400.
        handlers = {
            'accept': community.accept_record,
            'reject': community.reject_record,
            'remove': community.remove_record,
        }
        if action not in handlers:
            abort(400)
        # Resolve recid to a Record and apply the requested action.
        resolver = Resolver(
            pid_type='recid', object_type='rec', getter=Record.get_record)
        pid, record = resolver.resolve(recid)
        handlers[action](record)
        record.commit()
        db.session.commit()
        RecordIndexer().index_by_id(record.id)
        return jsonify({'status': 'success'})

    return render_template(
        current_app.config['COMMUNITIES_CURATE_TEMPLATE'],
        community=community,
    )
def test_demo_cmd_load(app):
    """Test the `load` CLI command."""
    with app.app_context():
        cli_runner = CliRunner()
        script_info = ScriptInfo(create_app=lambda info: app)
        with cli_runner.isolated_filesystem():
            # Initialize schemas, then load the demo data and configuration.
            assert cli_runner.invoke(
                schemas_cmd, ['init'], obj=script_info).exit_code == 0
            assert cli_runner.invoke(
                demo_cmd, ['load_data'], obj=script_info).exit_code == 0
            cli_runner.invoke(demo_cmd, ['load_config'], obj=script_info)
            # FIXME: also assert the exit code above and check that the
            # config is actually loaded.
            resolver = Resolver(
                pid_type='b2rec', object_type='rec',
                getter=partial(Record.get_record, with_deleted=True))
            # check that the loaded record exists
            resolved = resolver.resolve('a1c2ef96a1e446fa9bd7a2a46d2242d4')
            schema = resolved[1]['$schema']
            assert urlparse(schema).scheme == \
                app.config['PREFERRED_URL_SCHEME']
def change_record_privacy(pid_value=None):
    """Toggle the public read/index permissions of a record.

    If the record is currently public (anonymous 'records-read' and
    'records-index' ActionUsers rows exist), those rows are removed;
    otherwise they are created.

    :param pid_value: 'recid' PID value of the record.
    :returns: empty 200 JSON response; 403 if the user may not update.
    """
    resolver = Resolver(
        pid_type='recid', object_type='rec', getter=Record.get_record)
    pid, record = resolver.resolve(pid_value)
    permission_update_record = update_permission_factory(record)
    if not permission_update_record.can():
        abort(403)
    index_instance = ActionUsers.query.filter(
        ActionUsers.action == "records-index",
        ActionUsers.argument == str(record.id),
        ActionUsers.user_id.is_(None)).first()
    read_instance = ActionUsers.query.filter(
        ActionUsers.action == "records-read",
        ActionUsers.argument == str(record.id),
        ActionUsers.user_id.is_(None)).first()
    with db.session.begin_nested():
        if index_instance:
            db.session.delete(index_instance)
            # Fix: guard against an inconsistent state where only the index
            # row exists -- the original deleted read_instance
            # unconditionally and db.session.delete(None) raises.
            if read_instance:
                db.session.delete(read_instance)
        else:
            action_read_record = RecordReadActionNeed(str(record.id))
            action_index_record = RecordIndexActionNeed(str(record.id))
            db.session.add(ActionUsers.allow(action_read_record))
            db.session.add(ActionUsers.allow(action_index_record))
    db.session.commit()
    resp = jsonify()
    resp.status_code = 200
    return resp
def test_resolver(app):
    """Test the class methods of PersistentIdentifier class.

    Builds PIDs '1'..'9' covering every PIDStatus (with and without an
    attached object), plus one DOI and two redirects, then asserts the
    Resolver raises the right exception for each status.
    """
    status = [
        PIDStatus.NEW,
        PIDStatus.RESERVED,
        PIDStatus.REGISTERED,
        PIDStatus.DELETED,
    ]
    with app.app_context():
        i = 1
        rec_a = uuid.uuid4()
        # Create pids for each status with and without object.
        # Odd pid values have no object; even ones point at rec_a
        # (DELETED gets no object variant).
        for s in status:
            PersistentIdentifier.create('recid', i, status=s)
            i += 1
            if s != PIDStatus.DELETED:
                PersistentIdentifier.create(
                    'recid', i, status=s, object_type='rec',
                    object_uuid=rec_a)
                i += 1
        # Create a DOI
        pid_doi = PersistentIdentifier.create(
            'doi', '10.1234/foo', status=PIDStatus.REGISTERED,
            object_type='rec', object_uuid=rec_a)
        # Create redirects: '8' -> recid '2', '9' -> the DOI.
        pid = PersistentIdentifier.create(
            'recid', i, status=PIDStatus.REGISTERED)
        i += 1
        pid.redirect(PersistentIdentifier.get('recid', '2'))
        pid = PersistentIdentifier.create(
            'recid', i, status=PIDStatus.REGISTERED)
        pid.redirect(pid_doi)
        db.session.commit()
        # Start tests
        resolver = Resolver(
            pid_type='recid', object_type='rec', getter=lambda x: x)
        # Resolve non-existing pid (the DOI value is also unknown to the
        # 'recid' resolver).
        pytest.raises(PIDDoesNotExistError, resolver.resolve, '100')
        pytest.raises(PIDDoesNotExistError, resolver.resolve, '10.1234/foo')
        # Resolve status new
        pytest.raises(PIDUnregistered, resolver.resolve, '1')
        pytest.raises(PIDUnregistered, resolver.resolve, '2')
        # Resolve status reserved
        pytest.raises(PIDUnregistered, resolver.resolve, '3')
        pytest.raises(PIDUnregistered, resolver.resolve, '4')
        # Resolve status registered: '5' has no object, '6' points at rec_a.
        pytest.raises(PIDMissingObjectError, resolver.resolve, '5')
        pid, obj = resolver.resolve('6')
        assert pid and obj == rec_a
        # Resolve status deleted
        pytest.raises(PIDDeletedError, resolver.resolve, '7')
        # Resolve status redirected: the exception carries the destination.
        try:
            resolver.resolve('8')
            assert False
        except PIDRedirectedError as e:
            assert e.destination_pid.pid_type == 'recid'
            assert e.destination_pid.pid_value == '2'
        try:
            resolver.resolve('9')
            assert False
        except PIDRedirectedError as e:
            assert e.destination_pid.pid_type == 'doi'
            assert e.destination_pid.pid_value == '10.1234/foo'
        # A 'doi' resolver does not know the recid values.
        doiresolver = Resolver(
            pid_type='doi', object_type='rec', getter=lambda x: x)
        pytest.raises(PIDDoesNotExistError, doiresolver.resolve, '1')
        pid, obj = doiresolver.resolve('10.1234/foo')
        assert pid and obj == rec_a
def test_getting_record_with_external_pids(app, login_user, test_users,
                                           deposit_with_external_pids,
                                           records_data_with_external_pids):
    """External pids are serialized in the metadata when it is allowed.

    Checks visibility of 'external_pids' for deposit/record item views and
    search listings, for the owner and a normal user, including the
    embargoed (closed-access) case.
    """
    # Helper: fetch a single deposit as `user` and return the parsed JSON.
    def test_get_deposit(deposit_pid_value, user):
        with app.test_client() as client:
            login_user(user, client)
            deposit_url = url_for('b2share_deposit_rest.b2dep_item',
                                  pid_value=deposit_pid_value)
            resp = client.get(deposit_url)
            deposit_data = json.loads(resp.get_data(as_text=True))
            return deposit_data

    # Helper: fetch a single record as `user` and return the parsed JSON.
    def test_get_record(record_pid_value, user):
        with app.test_client() as client:
            login_user(user, client)
            record_url = url_for('b2share_records_rest.b2rec_item',
                                 pid_value=record_pid_value)
            resp = client.get(record_url)
            record_data = json.loads(resp.get_data(as_text=True))
            return record_data

    # Helper: run a draft (deposit) search as `user`.
    def test_search_deposits(user):
        with app.test_client() as client:
            login_user(user, client)
            search_deposits_url = url_for(
                'b2share_records_rest.b2rec_list', drafts=1, size=100)
            headers = [('Content-Type', 'application/json'),
                       ('Accept', 'application/json')]
            resp = client.get(search_deposits_url, headers=headers)
            deposit_search_res = json.loads(resp.get_data(as_text=True))
            return deposit_search_res

    # Helper: run a published-record search as `user`.
    def test_search_records(user):
        with app.test_client() as client:
            login_user(user, client)
            search_records_url = url_for(
                'b2share_records_rest.b2rec_list', size=100)
            headers = [('Content-Type', 'application/json'),
                       ('Accept', 'application/json')]
            resp = client.get(search_records_url, headers=headers)
            record_search_res = json.loads(resp.get_data(as_text=True))
            return record_search_res

    with app.app_context():
        deposit = Deposit.get_record(deposit_with_external_pids.deposit_id)
    with app.app_context():
        deposit_data = test_get_deposit(deposit.pid.pid_value,
                                        test_users['deposits_creator'])
        # assert that the external_pids are visible
        # when getting a specific deposit
        assert_external_files(
            deposit, deposit_data['metadata']['external_pids'])
        current_search_client.indices.refresh('*')
        deposit_search_data = test_search_deposits(
            test_users['deposits_creator'])
        assert deposit_search_data['hits']['total'] == 1
        # external_pids are not shown in a deposit search because it would
        # use too much resources to generate it for each search hit.
        assert 'external_pids' not in deposit_search_data[
            'hits']['hits'][0]['metadata']
    with app.app_context():
        # Publish the deposit and resolve the resulting record.
        deposit = Deposit.get_record(deposit_with_external_pids.deposit_id)
        deposit.submit()
        deposit.publish()
        record_resolver = Resolver(
            pid_type='b2rec', object_type='rec',
            getter=B2ShareRecord.get_record,
        )
        record_pid, record = record_resolver.resolve(deposit.pid.pid_value)
        current_search_client.indices.refresh('*')
        record_data = test_get_record(record_pid.pid_value,
                                      test_users['deposits_creator'])
        # when getting a specific record the owner sees the external_pids
        assert_external_files(record, record_data['metadata']['external_pids'])
    with app.app_context():
        record_data = test_get_record(record_pid.pid_value,
                                      test_users['normal'])
        # and all other users as well if it is open access
        assert_external_files(record, record_data['metadata']['external_pids'])
        deposit_search_data = test_search_deposits(
            test_users['deposits_creator'])
        assert deposit_search_data['hits']['total'] == 1
        # external_pids are not shown in deposit search even when published
        assert 'external_pids' not in deposit_search_data[
            'hits']['hits'][0]['metadata']
        record_search_data = test_search_records(
            test_users['deposits_creator'])
        assert record_search_data['hits']['total'] == 1
        # external_pids are shown for record search if they are open access
        # for all users
        assert 'external_pids' in record_search_data[
            'hits']['hits'][0]['metadata']
        record_search_data = test_search_records(test_users['normal'])
        assert record_search_data['hits']['total'] == 1
        assert 'external_pids' in record_search_data[
            'hits']['hits'][0]['metadata']
    with app.app_context():
        # Second deposit: embargoed and closed access.
        deposit2 = create_deposit(records_data_with_external_pids,
                                  test_users['deposits_creator'])
        deposit2 = deposit2.patch([
            {'op': 'add', 'path': '/embargo_date',
             'value': (datetime.utcnow() + timedelta(days=1)).isoformat()},
            {'op': 'replace', 'path': '/open_access', 'value': False}
        ])
        deposit2.commit()
        deposit2.submit()
        deposit2.publish()
        record_resolver = Resolver(
            pid_type='b2rec', object_type='rec',
            getter=B2ShareRecord.get_record,
        )
        record_pid, record = record_resolver.resolve(deposit2.pid.pid_value)
        record_data = test_get_record(record_pid.pid_value,
                                      test_users['deposits_creator'])
        # owners of records have access to files and
        # external_pids of embargoed records
        assert_external_files(record, record_data['metadata']['external_pids'])
    with app.app_context():
        record_data = test_get_record(record_pid.pid_value,
                                      test_users['normal'])
        # normal users shouldn't have access to the
        # files and external_pids of an embargoed record
        assert 'metadata' in record_data
        assert 'external_pids' not in record_data['metadata']
class RecordDump(object):
    """Record dump wrapper.

    Wrapper around a record dump, with tools for loading the dump. Extend
    this class to provide custom behavior for loading of record dumps.

    Known limitations:

    - Only persistent identifiers present in the last revision of the
      record will be registered.
    """

    def __init__(self, data, source_type='marcxml', latest_only=False,
                 pid_fetchers=None, dojson_model=marc21):
        """Initialize class.

        :param data: raw record dump (expects 'recid', 'record' and
            'files' keys -- see the prepare_* methods).
        :param source_type: format of the dumped revisions ('marcxml'
            triggers dojson translation; anything else reads 'json').
        :param latest_only: if True, only the newest revision is loaded.
        :param pid_fetchers: list of PID fetcher callables.
        :param dojson_model: dojson model used to translate MARCXML.
        """
        self.resolver = Resolver(
            pid_type='recid', object_type='rec', getter=Record.get_record)
        self.data = data
        self.source_type = source_type
        self.latest_only = latest_only
        self.dojson_model = dojson_model
        # Filled by prepare_revisions(); list of (datetime, json) tuples.
        self.revisions = None
        self.pid_fetchers = pid_fetchers or []

    @cached_property
    def record(self):
        """Get the already-existing record for this dump, or None."""
        try:
            return self.resolver.resolve(self.data['recid'])[1]
        except PIDDoesNotExistError:
            return None

    @cached_property
    def missing_pids(self):
        """Filter persistent identifiers.

        Returns the subset of self.pids not yet present in PIDStore.
        """
        missing = []
        for p in self.pids:
            try:
                PersistentIdentifier.get(p.pid_type, p.pid_value)
            except PIDDoesNotExistError:
                missing.append(p)
        return missing

    @cached_property
    def recid(self):
        """Get recid."""
        return self.data['recid']

    def _prepare_revision(self, data):
        """Convert one dumped revision to a (datetime, json) tuple."""
        dt = arrow.get(data['modification_datetime']).datetime
        if self.source_type == 'marcxml':
            # Translate MARCXML through the configured dojson model.
            val = self.dojson_model.do(create_record(data['marcxml']))
        else:
            val = data['json']
        return (dt, val)

    def prepare_revisions(self):
        """Prepare data.

        Populates self.revisions from the dump, keeping only the first
        entry when latest_only is set.
        """
        # Prepare revisions
        self.revisions = []
        it = [self.data['record'][0]] if self.latest_only \
            else self.data['record']
        for i in it:
            self.revisions.append(self._prepare_revision(i))

    def prepare_files(self):
        """Get files from data dump.

        Groups file entries by full name and sorts each group by version.
        """
        # Prepare files
        files = {}
        for f in self.data['files']:
            k = f['full_name']
            if k not in files:
                files[k] = []
            files[k].append(f)
        # Sort versions
        for k in files.keys():
            files[k].sort(key=lambda x: x['version'])
        self.files = files

    def prepare_pids(self):
        """Prepare persistent identifiers.

        Runs each fetcher against the latest revision only (see the class
        docstring's known limitation).
        """
        self.pids = []
        for fetcher in self.pid_fetchers:
            val = fetcher(None, self.revisions[-1][1])
            if val:
                self.pids.append(val)

    def pop_first_revision(self):
        """Remove and return the first (oldest) prepared revision."""
        return self.revisions.pop(0)

    def is_deleted(self, record=None):
        """Check if record is deleted.

        Defaults to the latest prepared revision when no record is given.
        """
        record = record or self.revisions[-1][1]
        return any(
            col == 'deleted' for col in record.get('collections', [])
        )
def suggest():
    """Suggest a record for inclusion in a community.

    Expects 'community' and 'recpid' request values. If the current user
    has curate permission on the community the record is added directly;
    otherwise an InclusionRequest is created for the curate list.

    :returns: redirect to the referrer (or the given 'url' value).
    """
    community = None
    record = None
    url = request.referrer
    if "url" in request.values and request.values["url"]:
        url = request.values["url"]
    # Idiom fix: `"x" not in` instead of `not "x" in`.
    if "community" not in request.values:
        flash(u"Error, no {} given".format(
            current_app.config["COMMUNITIES_NAME"]), "danger")
        return redirect(url)
    community_id = request.values["community"]
    community = Community.get(community_id)
    if not community:
        flash(u"Error, unknown {} {}".format(
            current_app.config["COMMUNITIES_NAME"], community_id), "danger")
        return redirect(url)
    if not _get_permission("communities-read", community).can() \
            and not DynamicPermission(ActionNeed('admin-access')).can():
        flash(u"Error, you don't have permissions on the {} {}".format(
            current_app.config["COMMUNITIES_NAME"], community_id), "danger")
        return redirect(url)
    if "recpid" not in request.values:
        flash(u"Error, no record given", "danger")
        return redirect(url)
    recid = request.values["recpid"]
    resolver = Resolver(
        pid_type='recid', object_type='rec', getter=Record.get_record)
    try:
        pid, record = resolver.resolve(recid)
    except Exception:
        # Fix: typo "unkown" -> "unknown" in the user-facing message.
        flash(u"Error, unknown record {}".format(recid), "danger")
        return redirect(url)
    # if the user has the curate permission on this community,
    # we automatically add the record
    if _get_permission("communities-curate", community).can():
        try:
            community.add_record(record)
        except Exception:
            # Fix: bare `except:` also caught SystemExit/KeyboardInterrupt.
            # the record is already in the community
            flash(u"The record already exists in the {} {}.".format(
                current_app.config["COMMUNITIES_NAME"], community.title),
                "warning")
        else:
            record.commit()
            flash(u"The record has been added to the {} {}.".format(
                current_app.config["COMMUNITIES_NAME"], community.title))
    # otherwise we only suggest it and it will appear in the curate list
    else:
        try:
            InclusionRequest.create(community=community, record=record,
                                    user=current_user)
        except InclusionRequestObsoleteError:
            # the record is already in the community
            flash(u"The record already exists in the {} {}.".format(
                current_app.config["COMMUNITIES_NAME"], community.title),
                "warning")
        except InclusionRequestExistsError:
            flash(u"The record has already been suggested "
                  u"to the {} {}.".format(
                      current_app.config["COMMUNITIES_NAME"],
                      community.title), "warning")
        else:
            flash(u"The record has been suggested "
                  u"to the {} {}.".format(
                      current_app.config["COMMUNITIES_NAME"],
                      community.title))
    db.session.commit()
    RecordIndexer().index_by_id(record.id)
    return redirect(url)
def test_record_delete_version(app, test_records, test_users):
    """Test deletion of a record version.

    Builds a version chain from an existing record, then deletes head,
    intermediate and remaining versions, asserting the PIDVersioning child
    count after each step and that the parent PID is only deleted once the
    last child (including the draft) is gone.
    """
    with app.app_context():
        resolver = Resolver(
            pid_type='b2rec',
            object_type='rec',
            getter=B2ShareRecord.get_record,
        )
        v1 = test_records[0].data
        v1_pid, v1_id = pid_of(v1)
        _, v1_rec = resolver.resolve(v1_id)
        # v2: new version of v1, with one file attached.
        data = copy_data_from_previous(v1_rec.model.json)
        v2 = create_deposit(data, test_users['deposits_creator'],
                            version_of=v1_id)
        ObjectVersion.create(v2.files.bucket, 'myfile1',
                             stream=BytesIO(b'mycontent'))
        v2.submit()
        v2.publish()
        v2_pid, v2_id = pid_of(v2)
        # v3: new version of v2.
        data = copy_data_from_previous(v2.model.json)
        v3 = create_deposit(data, test_users['deposits_creator'],
                            version_of=v2_id)
        v3.submit()
        v3.publish()
        v3_pid, v3_id = pid_of(v3)
        v3_pid, v3_rec = resolver.resolve(v3_pid.pid_value)
        # chain is now: [v1] -- [v2] -- [v3]
        version_child = PIDVersioning(child=v2_pid)
        version_master = PIDVersioning(parent=version_child.parent)
        assert len(version_master.children.all()) == 3
        # Delete the head version.
        v3_rec.delete()
        assert len(version_master.children.all()) == 2
        # chain is now [v1] -- [v2]
        # assert that we can create again a new version from v2
        data = copy_data_from_previous(v2.model.json)
        v3 = create_deposit(data, test_users['deposits_creator'],
                            version_of=v2_id)
        v3.submit()
        v3.publish()
        v3_pid, v3_id = pid_of(v3)
        v3_pid, v3_rec = resolver.resolve(v3_pid.pid_value)
        assert len(version_master.children.all()) == 3
        v2_pid, v2_rec = resolver.resolve(v2_pid.pid_value)
        # Delete an intermediate version
        v2_rec.delete()
        assert len(version_master.children.all()) == 2
        # chain is now [v1] -- [v3]
        # Add a new version
        data = copy_data_from_previous(v3.model.json)
        v4 = create_deposit(data, test_users['deposits_creator'],
                            version_of=v3_id)
        v4.submit()
        v4.publish()
        assert len(version_master.children.all()) == 3
        # final chain [v1] -- [v3] -- [v4]
        v4_pid, v4_id = pid_of(v4)
        v4_pid, v4_rec = resolver.resolve(v4_pid.pid_value)
        data = copy_data_from_previous(v4)
        draft_child = create_deposit(data, test_users['deposits_creator'],
                                     version_of=v4_id)
        draft_child.submit()
        # delete all children except the draft child
        assert len(version_master.children.all()) == 3
        v4_rec.delete()
        assert len(version_master.children.all()) == 2
        v3_rec.delete()
        assert len(version_master.children.all()) == 1
        v1_rec.delete()
        assert len(version_master.children.all()) == 0
        # The parent PID survives while an unpublished draft remains.
        assert version_master.parent.status != PIDStatus.DELETED
        draft_child.publish()
        draft_child_pid, draft_child_id = pid_of(draft_child)
        draft_child_pid, draft_child_rec = \
            resolver.resolve(draft_child_pid.pid_value)
        # assert that we can create again a new version
        assert len(version_master.children.all()) == 1
        # no child remains and there is no draft_child
        draft_child_rec.delete()
        assert version_master.parent.status == PIDStatus.DELETED
def update_record_permissions(pid_value=None):
    """Update per-user and per-role permissions of a record.

    The JSON payload maps e-mail addresses (users) or role names to lists
    of ``{"action": ..., "op": "add"|"remove"}`` changes, where action is
    one of 'records-read', 'records-index' or 'records-update'. Unknown
    role names are created on the fly.

    :param pid_value: 'recid' PID value of the record.
    :returns: empty 200 JSON response; 400 if EMAIL_REGEX is unconfigured.
    """
    resolver = Resolver(
        pid_type='recid', object_type='rec', getter=Record.get_record)
    pid, record = resolver.resolve(pid_value)
    payload = request.get_json()
    if not current_app.config.get('EMAIL_REGEX', None):
        # Fix: the original built jsonify(**{message: ...}) with the
        # undefined name `message`, raising NameError instead of a 400.
        resp = jsonify(**{'message': "ERROR in email regex ;)"})
        resp.status_code = 400
        return resp
    email_regex = re.compile(current_app.config.get('EMAIL_REGEX'))
    emails = []
    role_names = []
    # Partition payload keys into user e-mails and role names.
    for userrole in payload.keys():
        if email_regex.match(userrole):
            emails.append(userrole)
        else:
            #: [TOBEFIXED] Needs to check if E-Group exists
            try:
                role = Role.query.filter(Role.name == userrole).first()
                if not role:
                    _datastore.create_role(name=userrole)
                role_names.append(userrole)
            except Exception:
                # Fix: bare `except:` replaced; best-effort kept as before.
                print("Something happened when trying to create '" +
                      userrole + "' role")
    users = User.query.filter(User.email.in_(emails)).all()
    roles = Role.query.filter(Role.name.in_(role_names)).all()
    record_id = str(record.id)
    # Map action names to the Need granting them on this record.
    needs = {
        "records-read": RecordReadActionNeed(record_id),
        "records-index": RecordIndexActionNeed(record_id),
        "records-update": RecordUpdateActionNeed(record_id),
    }

    def _apply(model, changes, subject_column, subject_id, **allow_kw):
        # Apply add/remove changes for one subject (user or role); `model`
        # is ActionUsers or ActionRoles. Replaces the original's six nearly
        # identical if/elif branches per subject type.
        for change in changes or []:
            need = needs.get(change.get("action", None))
            op = change.get("op", None)
            if need is None:
                continue
            if op == "add":
                db.session.add(model.allow(need, **allow_kw))
            elif op == "remove":
                existing = model.query.filter(
                    model.action == change.get("action"),
                    model.argument == record_id,
                    subject_column == subject_id).first()
                if existing:
                    db.session.delete(existing)

    with db.session.begin_nested():
        for user in users:
            _apply(ActionUsers, payload.get(user.email, None),
                   ActionUsers.user_id, user.id, user=user)
        for role in roles:
            _apply(ActionRoles, payload.get(role.name, None),
                   ActionRoles.role_id, role.id, role=role)
    db.session.commit()
    resp = jsonify()
    resp.status_code = 200
    return resp