def _delete_record(pid_type, pid_value):
    """Hard-delete a record and purge every PID pointing at it."""
    # Force-delete the record payload first.
    get_db_record(pid_type, pid_value)._delete(force=True)
    pid = PersistentIdentifier.get(pid_type, pid_value)
    PersistentIdentifier.delete(pid)
    # Sweep away any remaining PIDs attached to the same record object.
    PersistentIdentifier.query.filter(
        PersistentIdentifier.object_uuid == pid.object_uuid).delete()
    db.session.commit()
def _delete_record(pid_type, pid_value):
    """Delete a record and clean up all of its persistent identifiers."""
    record = get_db_record(pid_type, pid_value)
    record._delete(force=True)
    pid = PersistentIdentifier.get(pid_type, pid_value)
    PersistentIdentifier.delete(pid)
    # Any other PID still bound to the same object UUID goes too.
    stale_pids = PersistentIdentifier.query.filter(
        PersistentIdentifier.object_uuid == pid.object_uuid)
    stale_pids.delete()
    db.session.commit()
def jhep_with_malformed_title(app):
    """Temporarily add a malformed title to the JHEP record."""
    journal = get_db_record('jou', 1213103)
    journal['title_variants'].append('+++++')
    record_insert_or_replace(journal)
    yield
    # Teardown: drop the malformed variant appended above.
    journal = get_db_record('jou', 1213103)
    journal['title_variants'] = journal['title_variants'][:-1]
    record_insert_or_replace(journal)
def book_with_another_document_type(app):
    """Temporarily add another document type to a book record."""
    book = get_db_record('lit', 1373790)
    book['document_type'] = ['book', 'proceedings']
    record_insert_or_replace(book)
    yield
    # Teardown: restore the single original document type.
    book = get_db_record('lit', 1373790)
    book['document_type'] = ['book']
    record_insert_or_replace(book)
def jhep_with_malformed_title(app):
    """Temporarily add a malformed title to the JHEP record."""
    rec = get_db_record('jou', 1213103)
    rec['title_variants'].append('+++++')
    record_insert_or_replace(rec)

    yield

    # Undo the change so other tests see the pristine record.
    rec = get_db_record('jou', 1213103)
    rec['title_variants'] = rec['title_variants'][:-1]
    record_insert_or_replace(rec)
def start_merger(head_id, update_id, current_user_id=None):
    """Start a new ManualMerge workflow to merge two records manually.

    Args:
        head_id: the id of the first record to merge. This record is the
            one that will be updated with the new information.
        update_id: the id of the second record to merge. This record is
            the one that is going to be deleted and replaced by `head`.
        current_user_id: Id of the current user provided by the Flask app.

    Returns:
        (int): the current workflow object's id.
    """
    data = {
        'pid_type': 'lit',  # TODO: support
        'recid_head': head_id,
        'recid_update': update_id,
    }

    head = get_db_record('lit', head_id)
    update = get_db_record('lit', update_id)

    # Empty payload here: the merge data lives in ``extra_data`` instead.
    workflow_object = workflow_object_class.create(data=None,
                                                   id_user=current_user_id,
                                                   data_type='hep')

    wf_id = workflow_object.id  # to retrieve it later
    workflow_object.extra_data.update(data)

    # preparing identifiers in order to do less requests possible later
    head_source = get_head_source(head.id) or merger_get_source(head)

    update_source = get_source(update)
    # 'arxiv' is the fallback source when none is recorded on the update.
    update_source = update_source if update_source else 'arxiv'

    workflow_object.extra_data['head_source'] = head_source.lower()
    workflow_object.extra_data['update_source'] = update_source.lower()

    workflow_object.extra_data['head_control_number'] = head_id
    workflow_object.extra_data['update_control_number'] = update_id

    workflow_object.extra_data['head_uuid'] = str(head.id)
    workflow_object.extra_data['update_uuid'] = str(update.id)

    workflow_object.extra_data['head'] = head
    workflow_object.extra_data['update'] = update

    workflow_object.save()

    start('manual_merge', object_id=wf_id)

    return wf_id
def jhep_with_malformed_title(app):
    """Temporarily add a malformed title to the JHEP record."""
    journal = get_db_record('jou', 1213103)
    journal['title_variants'].append('+++++')
    InspireRecord.create_or_update(journal).commit()

    yield

    # Teardown: remove the malformed variant appended above.
    journal = get_db_record('jou', 1213103)
    journal['title_variants'] = journal['title_variants'][:-1]
    InspireRecord.create_or_update(journal).commit()
def book_with_another_document_type(app):
    """Temporarily add another document type to a book record."""
    book = get_db_record('lit', 1373790)
    book['document_type'] = ['book', 'proceedings']
    InspireRecord.create_or_update(book).commit()

    yield

    # Teardown: restore the original single document type.
    book = get_db_record('lit', 1373790)
    book['document_type'] = ['book']
    InspireRecord.create_or_update(book).commit()
def test_records_can_be_merged(api_client, not_yet_merged_records):
    """Merging a record makes its old recid redirect to the survivor."""
    # Both records are reachable before the merge.
    assert api_client.get('/literature/111').status_code == 200
    assert api_client.get('/literature/222').status_code == 200

    survivor = get_db_record('lit', 111)
    loser = get_db_record('lit', 222)
    loser['deleted'] = True
    loser['new_record'] = {'$ref': 'http://localhost:5000/api/record/111'}
    loser.merge(survivor)
    db.session.commit()

    # Survivor still answers 200; the merged-away recid now redirects.
    assert api_client.get('/literature/111').status_code == 200
    assert api_client.get('/literature/222').status_code == 301
def book_with_another_document_type(app):
    """Temporarily add another document type to a book record."""
    rec = get_db_record('lit', 1373790)
    rec['document_type'] = ['book', 'proceedings']
    rec = InspireRecord.create_or_update(rec)
    rec.commit()

    yield

    # Put the record back the way we found it.
    rec = get_db_record('lit', 1373790)
    rec['document_type'] = ['book']
    rec = InspireRecord.create_or_update(rec)
    rec.commit()
def jhep_with_malformed_title(app):
    """Temporarily add a malformed title to the JHEP record."""
    rec = get_db_record('jou', 1213103)
    rec['title_variants'].append('+++++')
    rec = InspireRecord.create_or_update(rec)
    rec.commit()

    yield

    # Put the record back the way we found it.
    rec = get_db_record('jou', 1213103)
    rec['title_variants'] = rec['title_variants'][:-1]
    rec = InspireRecord.create_or_update(rec)
    rec.commit()
def cern_with_hal_id(app):
    """Temporarily add the HAL id to the CERN record."""
    institution = get_db_record('ins', 902725)
    institution['external_system_identifiers'] = [
        {'schema': 'HAL', 'value': '300037'},
    ]
    record_insert_or_replace(institution)
    es.indices.refresh('records-institutions')

    yield

    # Teardown: strip the identifier added above.
    institution = get_db_record('ins', 902725)
    del institution['external_system_identifiers']
    record_insert_or_replace(institution)
    es.indices.refresh('records-institutions')
def test_records_can_be_merged(api_client, not_yet_merged_records):
    """A merged-away record starts answering 301 to its old recid."""
    for recid in ('111', '222'):
        assert api_client.get('/literature/' + recid).status_code == 200

    kept = get_db_record('lit', 111)
    merged_away = get_db_record('lit', 222)
    merged_away['deleted'] = True
    merged_away['new_record'] = {'$ref': 'http://localhost:5000/api/record/111'}
    merged_away.merge(kept)
    db.session.commit()

    assert api_client.get('/literature/111').status_code == 200
    assert api_client.get('/literature/222').status_code == 301
def start_merger(head_id, update_id, current_user_id=None):
    """Start a new ManualMerge workflow to merge two records manually.

    Args:
        head_id: the id of the first record to merge. This record is the
            one that will be updated with the new information.
        update_id: the id of the second record to merge. This record is
            the one that is going to be deleted and replaced by `head`.
        current_user_id: Id of the current user provided by the Flask app.

    Returns:
        (int): the current workflow object's id.
    """
    data = {
        'pid_type': 'lit',  # TODO: support
        'recid_head': head_id,
        'recid_update': update_id,
    }

    head = get_db_record('lit', head_id)
    update = get_db_record('lit', update_id)

    # Empty payload here: the merge data lives in ``extra_data`` instead.
    workflow_object = workflow_object_class.create(
        data=None,
        id_user=current_user_id,
        data_type='hep'
    )

    wf_id = workflow_object.id  # to retrieve it later
    workflow_object.extra_data.update(data)

    update_source = LiteratureReader(update).source
    # 'arxiv' is the fallback source when none is recorded on the update.
    update_source = update_source if update_source else 'arxiv'
    workflow_object.extra_data['update_source'] = update_source.lower()

    workflow_object.extra_data['head_control_number'] = head_id
    workflow_object.extra_data['update_control_number'] = update_id

    workflow_object.extra_data['head_uuid'] = str(head.id)
    workflow_object.extra_data['update_uuid'] = str(update.id)

    workflow_object.extra_data['head'] = head
    workflow_object.extra_data['update'] = update

    workflow_object.save()

    start('manual_merge', object_id=wf_id)

    return wf_id
def _delete_record(pid_type, pid_value):
    """Delete a record, its recid reservation, and all attached PIDs."""
    get_db_record(pid_type, pid_value)._delete(force=True)

    pid = PersistentIdentifier.get(pid_type, pid_value)
    PersistentIdentifier.delete(pid)

    # Free the reserved record identifier, if one exists.
    recpid = RecordIdentifier.query.filter_by(recid=pid_value).one_or_none()
    if recpid:
        db.session.delete(recpid)

    # Drop every other PID still bound to the same record object.
    PersistentIdentifier.query.filter(
        PersistentIdentifier.object_uuid == pid.object_uuid).delete()
    db.session.commit()
def _delete_record(pid_type, pid_value):
    """Fully remove a record: payload, recid reservation, and PIDs."""
    record = get_db_record(pid_type, pid_value)
    record._delete(force=True)

    pid = PersistentIdentifier.get(pid_type, pid_value)
    PersistentIdentifier.delete(pid)

    # Release the reserved recid so it can be minted again.
    reservation = RecordIdentifier.query.filter_by(
        recid=pid_value).one_or_none()
    if reservation:
        db.session.delete(reservation)

    # Purge the remaining PIDs pointing at the same object UUID.
    leftover = PersistentIdentifier.query.filter(
        PersistentIdentifier.object_uuid == pid.object_uuid)
    leftover.delete()
    db.session.commit()
def test_references_can_be_updated(app, records_to_be_merged):
    """After a merge, references to the old record get rewritten."""
    winner = get_db_record('lit', 111)
    loser = get_db_record('lit', 222)
    loser.merge(winner)

    update_refs.delay(
        'http://localhost:5000/api/literature/222',
        'http://localhost:5000/api/literature/111',
    )

    pointing_record = get_db_record('lit', 333)
    ref = get_value(pointing_record, 'accelerator_experiments[0].record.$ref')
    assert ref == 'http://localhost:5000/api/literature/111'
def test_continuous_migration_handles_a_single_record(app, record_1502656):
    """A single queued legacy record is migrated and the queue drained."""
    redis = StrictRedis.from_url(current_app.config.get('CACHE_REDIS_URL'))
    assert redis.lrange('legacy_records', 0, 0) != []

    continuous_migration()

    assert redis.lrange('legacy_records', 0, 0) == []
    get_db_record('lit', 1502656)  # Does not raise.
    assert InspireProdRecords.query.get(1502656).marcxml == record_1502656
def test_continuous_migration_handles_a_single_record(app, record_1502656):
    """Migrating drains the legacy queue and stores the marcxml."""
    queue = StrictRedis.from_url(current_app.config.get('CACHE_REDIS_URL'))
    # The fixture must have seeded at least one legacy record.
    assert queue.lrange('legacy_records', 0, 0) != []

    continuous_migration()

    assert queue.lrange('legacy_records', 0, 0) == []
    get_db_record('lit', 1502656)  # Does not raise.
    stored = InspireProdRecords.query.get(1502656).marcxml
    assert stored == record_1502656
def resolve_missmatch_version_with_legacy(workflow_id, legacy_revision):
    """Revert record revision to be the same with the legacy version.

    Finds the record revision whose ``legacy_version`` matches
    ``legacy_revision``, replaces the record's content with it, and
    resumes the workflow from its first callback position.

    Example::

        resolve_missmatch_version_with_legacy(1236029, '20180926071008.0')
    """
    obj = workflow_object_class.get(workflow_id)
    record = get_db_record('lit', obj.data['control_number'])

    revisions = [
        revision for revision in record.revisions
        if revision.get('legacy_version') == legacy_revision
    ]
    if not revisions:
        print('revision {} not found'.format(legacy_revision))
        return None

    print('revision found.')
    revision = revisions.pop()

    # Replace the record's content wholesale with the matching revision.
    record.clear()
    record.update(revision, skip_files=True)
    record.commit()

    # Rewind the workflow to its first step and resume it asynchronously.
    obj.callback_pos = [0]
    obj.save()
    db.session.commit()
    response = obj.continue_workflow(delayed=True)
    # BUGFIX: was a Python 2 ``print`` statement, a SyntaxError on Python 3.
    print('Workflow {} currently in status {}'.format(workflow_id,
                                                      response.status))
def get_revisions(endpoint, pid_value):
    """Return the revision history of the given record as JSON."""
    Transaction = transaction_class(RecordMetadata)
    pid_type = get_pid_type_from_endpoint(endpoint)
    record = get_db_record(pid_type, pid_value)

    history = []
    for rev in reversed(record.revisions):
        tx_id = rev.model.transaction_id
        user = Transaction.query.filter(Transaction.id == tx_id).one().user
        # Revisions made outside a user session are attributed to 'system'.
        history.append({
            'updated': rev.updated,
            'revision_id': rev.revision_id,
            'user_email': user.email if user else 'system',
            'transaction_id': tx_id,
            'rec_uuid': record.id
        })
    return jsonify(history)
def get_revisions(endpoint, pid_value):
    """Serialize the record's revisions, newest first, as JSON."""
    Transaction = transaction_class(RecordMetadata)
    pid_type = get_pid_type_from_endpoint(endpoint)
    record = get_db_record(pid_type, pid_value)

    def _describe(revision):
        # One entry per revision; 'system' marks userless transactions.
        transaction_id = revision.model.transaction_id
        user = Transaction.query.filter(
            Transaction.id == transaction_id).one().user
        return {
            'updated': revision.updated,
            'revision_id': revision.revision_id,
            'user_email': user.email if user else 'system',
            'transaction_id': transaction_id,
            'rec_uuid': record.id
        }

    return jsonify([_describe(rev) for rev in reversed(record.revisions)])
def orcid_test(mock_user, request):
    """Orcid test fixture.

    Indexes a fake author document into Elasticsearch, appends a matching
    author to literature record 782466, and returns a mocked ORCID API
    client together with that record. The ES document is removed on
    teardown via a pytest finalizer.
    """
    app = mock_user.app

    def teardown(app):
        # Remove the fake author document indexed below.
        with app.app_context():
            es.delete(index='records-authors', doc_type='authors', id=10)

    # Minimal author record carrying an ORCID identifier.
    record = {
        "name": {
            "status": "ACTIVE",
            "preferred_name": "Full Name",
            "value": "Full Name"
        },
        "$schema": "http://localhost:5000/schemas/records/authors.json",
        "control_number": "10",
        "self": {
            "$ref": "http://localhost:5000/api/authors/10"
        },
        "ids": [{
            "type": "INSPIRE",
            "value": "INSPIRE-0000000"
        }, {
            "type": "ORCID",
            "value": "0000-0001-9412-8627"
        }],
        "self_recid": 10,
        "earliest_date": "2015-09-23"
    }
    request.addfinalizer(lambda: teardown(app))
    with app.app_context():
        es.index(index='records-authors', doc_type='authors', id=10,
                 body=record)
        es.indices.refresh('records-authors')
        # Attach an author pointing at the fake profile indexed above.
        record = get_db_record('literature', 782466)
        record['authors'].append({
            u'affiliations': [{
                u'value': u'St. Petersburg, INP'
            }],
            u'curated_relation': True,
            u'full_name': u'Full, Name',
            u'profile': {
                u'__url__': u'http://inspirehep.net/record/00000000'
            },
            u'record': {
                u'$ref': u'http://localhost:5000/api/authors/10'
            }
        })
        mock_orcid_api = OrcidApiMock(1)
    return mock_orcid_api, record
def start_edit_article_workflow(recid):
    """Start the ``edit_article`` workflow for ``recid`` and redirect to it.

    Aborts with 403 when the current user may not update the record and
    raises ``CallbackRecordNotFoundError`` when the record does not exist.
    When the referrer is an RT ticket URL, the ticket id is stored in the
    workflow's extra data.
    """
    try:
        record = get_db_record('lit', recid)
    except RecordGetterError:
        raise CallbackRecordNotFoundError(recid)
    record_permission = RecordPermission.create(action='update', record=record)
    if not record_permission.can():
        abort(403, record_permission)
    # has to be done before start() since, it is deattaching this session
    user_id = current_user.get_id()
    eng_uuid = start('edit_article', data=record)
    workflow_id = WorkflowEngine.from_uuid(eng_uuid).objects[0].id
    workflow = workflow_object_class.get(workflow_id)
    workflow.id_user = user_id
    if request.referrer:
        # BUGFIX: escape the whole base URL instead of only '?', so other
        # regex metacharacters ('.', '+', ...) in it match literally.
        base_rt_url = re.escape(get_rt_link_for_ticket(''))
        ticket_match = re.match(base_rt_url + r'(?P<ticket_id>\d+)',
                                request.referrer)
        if ticket_match:
            ticket_id = int(ticket_match.group('ticket_id'))
            workflow.extra_data['curation_ticket_id'] = ticket_id
    workflow.save()
    db.session.commit()
    url = "{}{}".format(current_app.config['WORKFLOWS_EDITOR_API_URL'],
                        workflow_id)
    return redirect(location=url, code=302)
def test_record_enhanced_in_es_and_not_enhanced_in_db(app):
    """Enhancement fields appear in the ES copy but not in the DB copy."""
    record_json = {
        '$schema': 'http://localhost:5000/schemas/records/hep.json',
        'document_type': ['article'],
        'control_number': 111,
        'titles': [{'title': 'Jessica Jones'}],
        '_collections': ['Literature'],
        'references': [
            {'record': {'$ref': 'http://localhost:5000/api/literature/1498589'}},
        ],
    }
    new_record = InspireRecord.create(record_json)
    new_record.commit()
    db.session.commit()
    es.indices.refresh('records-hep')

    db_copy = get_db_record('lit', 111)
    es_copy = get_es_record('lit', 111)
    assert 'facet_author_name' not in db_copy
    assert 'facet_author_name' in es_copy

    _delete_record('lit', 111)
def check_missing_records_in_es(data_output):
    """Checks if all not deleted records from pidstore are also in ElasticSearch"""
    # Total number of literature PIDs; used only to size the progress bar.
    all_records = int(
        PersistentIdentifier.query.filter(
            PersistentIdentifier.pid_type == 'lit').count())
    _prepare_logdir(data_output)
    click.echo("All missing records pids will be saved in %s file" % data_output)
    missing = 0
    _query = _gen_query(
        PersistentIdentifier.query.filter(
            PersistentIdentifier.pid_type == 'lit'))
    with click.progressbar(
            _query,
            length=all_records,
            label="Processing pids (%s pids)..." % all_records) as pidstore:
        with open(data_output, 'w') as data_file:
            for pid in pidstore:
                db_rec = get_db_record('lit', pid.pid_value)
                # Deleted records are expected to be absent from ES.
                if db_rec.get('deleted'):
                    continue
                try:
                    get_es_record('lit', pid.pid_value)
                except RecordGetterError:
                    # Not found in ES: record the pid for later inspection.
                    # Flush so the log survives an interrupted run.
                    missing += 1
                    data_file.write("%s\n" % pid.pid_value)
                    data_file.flush()
    click.echo("%s records are missing from es" % missing)
def cern_with_hal_id(app):
    """Temporarily add the HAL id to the CERN record."""
    cern = get_db_record('ins', 902725)
    hal_identifier = {'schema': 'HAL', 'value': '300037'}
    cern['external_system_identifiers'] = [hal_identifier]
    record_insert_or_replace(cern)
    es.indices.refresh('records-institutions')

    yield

    # Restore the record to its identifier-free state.
    cern = get_db_record('ins', 902725)
    del cern['external_system_identifiers']
    record_insert_or_replace(cern)
    es.indices.refresh('records-institutions')
def record_upsert(json):
    """Insert or update a record.

    Looks up the PID derived from the record's control number: if it
    exists the stored record is updated in place, otherwise a new record
    is created and a PID is minted for it. Records flagged ``deleted``
    additionally get their pidstore merged into the replacing record, or
    soft-deleted when no replacement is referenced.

    Returns:
        Record: the inserted or updated record.
    """
    control_number = json.get('control_number', json.get('recid'))
    if control_number:
        control_number = int(control_number)

    pid_type = InspireRecordIdProvider.schema_to_pid_type(json['$schema'])
    try:
        # EAFP: try the update path first, fall back to creation.
        pid = PersistentIdentifier.get(pid_type, control_number)
        record = Record.get_record(pid.object_uuid)
        record.update(json)
        record.commit()
    except PIDDoesNotExistError:
        record = Record.create(json, id_=None)
        # Create persistent identifier.
        inspire_recid_minter(str(record.id), json)

    if json.get('deleted'):
        new_recid = get_recid_from_ref(json.get('new_record'))
        if new_recid:
            # Merged into another record: fold the pidstores together.
            merged_record = get_db_record(pid_type, new_recid)
            merge_pidstores_of_two_merged_records(merged_record.id, record.id)
        else:
            soft_delete_pidstore_for_record(record.id)

    return record
def start_edit_article_workflow(recid):
    """Launch an ``edit_article`` workflow for ``recid`` and redirect to it.

    Raises ``CallbackRecordNotFoundError`` when the record is missing and
    aborts with 403 when the current user lacks update permission. An RT
    ticket id found in the referrer is saved into the workflow extra data.
    """
    try:
        record = get_db_record('lit', recid)
    except RecordGetterError:
        raise CallbackRecordNotFoundError(recid)
    record_permission = RecordPermission.create(action='update', record=record)
    if not record_permission.can():
        abort(403, record_permission)
    # has to be done before start() since, it is deattaching this session
    user_id = current_user.get_id()
    eng_uuid = start('edit_article', data=record)
    workflow_id = WorkflowEngine.from_uuid(eng_uuid).objects[0].id
    workflow = workflow_object_class.get(workflow_id)
    workflow.id_user = user_id
    if request.referrer:
        # BUGFIX: escape the whole base URL instead of only '?', so other
        # regex metacharacters ('.', '+', ...) in it match literally.
        base_rt_url = re.escape(get_rt_link_for_ticket(''))
        ticket_match = re.match(base_rt_url + r'(?P<ticket_id>\d+)',
                                request.referrer)
        if ticket_match:
            ticket_id = int(ticket_match.group('ticket_id'))
            workflow.extra_data['curation_ticket_id'] = ticket_id
    workflow.save()
    db.session.commit()
    url = "{}{}".format(current_app.config['WORKFLOWS_EDITOR_API_URL'],
                        workflow_id)
    return redirect(location=url, code=302)
def test_format_inbook(app):
    """The pybtex schema serializes an inbook record correctly."""
    inbook = get_db_record('lit', 1375491)

    expected_fields = [
        ('pages', u"421--462"),
        ('title', u"Supersymmetry"),
        ('year', u"2015"),
        ('doi', u"10.1007/978-3-319-15001-7_10"),
        ('archivePrefix', u"arXiv"),
        ('eprint', u"1506.03091"),
        ('primaryClass', u"hep-ex"),
    ]
    expected_persons = {
        'editor': [],
        'author': [
            Person(u"Bechtle, Philip"),
            Person(u"Plehn, Tilman"),
            Person(u"Sander, Christian"),
        ],
    }
    expected = ("Bechtle:2015nta",
                Entry('inbook', expected_fields, persons=expected_persons))

    result = PybtexSchema().load(inbook)

    assert result is not None
    assert pybtex_entries_equal(result, expected)
def test_format_proceeding(app):
    """The pybtex schema serializes a proceedings record correctly."""
    proceedings = get_db_record('lit', 701585)

    expected_fields = [
        ('address', u"Geneva"),
        ('pages', u"pp.1--326"),
        ('publisher', u"CERN"),
        ('title', u"HERA and the LHC: A Workshop on the implications of HERA for LHC physics: Proceedings Part A"),
        ('year', u"2005"),
        ('reportNumber', u"CERN-2005-014, DESY-PROC-2005-01"),
        ('archivePrefix', u"arXiv"),
        ('eprint', u"hep-ph/0601012"),
        ('url', u"http://weblib.cern.ch/abstract?CERN-2005-014"),
    ]
    expected_persons = {
        'editor': [Person(u"De Roeck, A."), Person(u"Jung, H.")],
        'author': [],
    }
    expected = ("Alekhin:2005dx",
                Entry('proceedings', expected_fields,
                      persons=expected_persons))

    result = PybtexSchema().load(proceedings)

    assert result is not None
    assert pybtex_entries_equal(result, expected)
def test_references_can_be_updated(app, records_to_be_merged):
    """update_refs rewrites references from the merged-away record."""
    target = get_db_record('lit', 111)
    source = get_db_record('lit', 222)
    source.merge(target)

    update_refs.delay('http://localhost:5000/api/literature/222',
                      'http://localhost:5000/api/literature/111')

    pointing_record = get_db_record('lit', 333)
    expected = 'http://localhost:5000/api/literature/111'
    assert expected == get_value(
        pointing_record, 'accelerator_experiments[0].record.$ref')
def test_revert_to_revision(log_in_as_cataloger, record_with_two_revisions, api_client):
    """PUT .../revisions/revert restores the requested revision."""
    assert get_db_record('lit', 111)['titles'][0]['title'] == 'record rev1'

    response = api_client.put(
        '/editor/literature/111/revisions/revert',
        content_type='application/json',
        data=json.dumps({'revision_id': 0}),
    )
    assert response.status_code == 200

    assert get_db_record('lit', 111)['titles'][0]['title'] == 'record rev0'
def test_record_can_be_deleted(api_client, not_yet_deleted_record):
    """Deleting a record makes its endpoint answer 410 Gone."""
    assert api_client.get('/literature/111').status_code == 200

    get_db_record('lit', 111).delete()
    db.session.commit()

    assert api_client.get('/literature/111').status_code == 410
def test_get_literature_recids_for_orcid_raises_if_two_authors_are_found(isolated_app):
    """Duplicating an author profile makes the ORCID lookup ambiguous."""
    clone = get_db_record('aut', 1061000)
    clone['control_number'] = 1061001
    clone = InspireRecord.create_or_update(clone)
    clone.commit()

    with pytest.raises(MultipleResultsFound):
        get_literature_recids_for_orcid('0000-0003-4792-9178')
def test_convert_to_tei_handles_preprints(app):
    """TEI output for a preprint validates against the AOFR schema."""
    record = get_db_record('lit', 1498589)

    xsd_stream = pkg_resources.resource_stream(
        __name__, os.path.join('fixtures', 'aofr.xsd'))
    schema = etree.XMLSchema(etree.parse(xsd_stream))

    tei_xml = etree.fromstring(convert_to_tei(record).encode('utf8'))

    assert schema.validate(tei_xml)
def test_convert_to_tei(cern_with_hal_id):
    """TEI output for a published record validates against the schema."""
    record = get_db_record('lit', 1472986)

    xsd_stream = pkg_resources.resource_stream(
        __name__, os.path.join('fixtures', 'aofr.xsd'))
    schema = etree.XMLSchema(etree.parse(xsd_stream))

    tei_xml = etree.fromstring(convert_to_tei(record).encode('utf8'))

    assert schema.validate(tei_xml)
def test_get_literature_recids_for_orcid_still_works_if_author_has_no_ids(isolated_app):
    """Removing the author's ids makes the ORCID lookup find nothing."""
    author = get_db_record('aut', 1061000)
    del author['ids']
    author = InspireRecord.create_or_update(author)
    author.commit()

    with pytest.raises(NoResultFound):
        get_literature_recids_for_orcid('0000-0003-4792-9178')
def test_get_literature_recids_for_orcid_still_works_if_author_has_no_orcid_id(isolated_app):
    """Dropping the ORCID id (keeping the BAI) yields no lookup result."""
    author = get_db_record('aut', 1061000)
    author['ids'] = [{'schema': 'INSPIRE BAI', 'value': 'Maurizio.Martinelli.1'}]
    author = InspireRecord.create_or_update(author)
    author.commit()

    with pytest.raises(NoResultFound):
        get_literature_recids_for_orcid('0000-0003-4792-9178')
def test_format_tei(app):
    """TEI output for a known record matches the stored fixture."""
    fixture = pkg_resources.resource_string(
        __name__, os.path.join('fixtures', 'test_tei_record.xml'))

    record = get_db_record('literature', 1407506)

    assert tei.tei_response(record) == fixture
def test_format_tei(app):
    """TEI output matches the fixture, ignoring surrounding whitespace."""
    fixture = pkg_resources.resource_string(
        __name__, os.path.join('fixtures', 'test_tei_record.xml'))

    record = get_db_record('lit', 1407506)
    rendered = tei.tei_response(record)

    assert rendered.strip() == fixture.strip()
def test_record_can_be_deleted(api_client, not_yet_deleted_record):
    """A deleted record's endpoint switches from 200 to 410."""
    assert api_client.get('/literature/111').status_code == 200

    doomed = get_db_record('lit', 111)
    doomed.delete()
    db.session.commit()

    assert api_client.get('/literature/111').status_code == 410
def revert_to_revision(endpoint, pid_value):
    """Revert given record to given revision"""
    pid_type = get_pid_type_from_endpoint(endpoint)
    record = get_db_record(pid_type, pid_value)
    # The target revision id comes from the JSON request payload.
    record.revert(request.json['revision_id'])
    db.session.commit()
    return jsonify(success=True)
def revert_to_revision(endpoint, pid_value):
    """Revert given record to given revision"""
    target_revision = request.json['revision_id']
    pid_type = get_pid_type_from_endpoint(endpoint)

    record = get_db_record(pid_type, pid_value)
    record.revert(target_revision)
    db.session.commit()

    return jsonify(success=True)
def check_if_record_is_going_to_be_deleted(sender, *args, **kwargs):
    """Checks if 'deleted' field is set as True before updating.

    If 'deleted' field exists and its value is True, before update, then
    delete all the record's pidstores: merged into the replacing record's
    pidstore when a ``new_record`` reference exists, soft-deleted otherwise.
    """
    control_number = int(sender.get('control_number'))
    collection = InspireRecordIdProvider.schema_to_pid_type(sender.get('$schema'))
    record = get_db_record(collection, control_number)
    if sender.get('deleted'):
        new_recid = get_recid_from_ref(sender.get('new_record'))
        if new_recid:
            merged_record = get_db_record(collection, new_recid)
            merge_pidstores_of_two_merged_records(merged_record.id, record.id)
        else:
            # BUGFIX: ``record`` was already fetched above; the original
            # redundantly re-queried the same record here.
            soft_delete_pidstore_for_record(record.id)
def test_get_literature_recids_for_orcid_still_works_if_author_has_no_ids(
        isolated_app):
    """Without any ids on the author, the ORCID lookup finds nothing."""
    stripped = get_db_record('aut', 1061000)
    del stripped['ids']
    stripped = InspireRecord.create_or_update(stripped)
    stripped.commit()

    with pytest.raises(NoResultFound):
        get_literature_recids_for_orcid('0000-0003-4792-9178')
def test_get_literature_recids_for_orcid_raises_if_two_authors_are_found(
        isolated_app):
    """Two author profiles sharing an ORCID make the lookup ambiguous."""
    duplicate = get_db_record('aut', 1061000)
    duplicate['control_number'] = 1061001
    duplicate = InspireRecord.create_or_update(duplicate)
    duplicate.commit()

    with pytest.raises(MultipleResultsFound):
        get_literature_recids_for_orcid('0000-0003-4792-9178')
def test_revert_to_revision(log_in_as_cataloger,
                            record_with_two_revisions,
                            api_client):
    """Reverting to revision 0 restores the original title."""
    before = get_db_record('lit', 111)
    assert before['titles'][0]['title'] == 'record rev1'

    payload = json.dumps({'revision_id': 0})
    response = api_client.put('/editor/literature/111/revisions/revert',
                              content_type='application/json',
                              data=payload)
    assert response.status_code == 200

    after = get_db_record('lit', 111)
    assert after['titles'][0]['title'] == 'record rev0'
def test_manual_merge_existing_records(workflow_app):
    """End-to-end check of the manual-merge workflow on two stored records."""
    json_head = fake_record('This is the HEAD', 1)
    json_update = fake_record('While this is the update', 2)

    # this two fields will create a merging conflict
    json_head['core'] = True
    json_update['core'] = False

    head = InspireRecord.create_or_update(json_head, skip_files=False)
    head.commit()
    update = InspireRecord.create_or_update(json_update, skip_files=False)
    update.commit()

    head_id = head.id
    update_id = update.id

    obj_id = start_merger(
        head_id=1,
        update_id=2,
        current_user_id=1,
    )

    do_resolve_manual_merge_wf(workflow_app, obj_id)

    # retrieve it again, otherwise Detached Instance Error
    obj = workflow_object_class.get(obj_id)

    assert obj.status == ObjectStatus.COMPLETED
    assert obj.extra_data['approved'] is True
    assert obj.extra_data['auto-approved'] is False

    # no root present before
    last_root = read_wf_record_source(head_id, 'arxiv')
    assert last_root is None

    update_source = LiteratureReader(update).source
    root_update = read_wf_record_source(update_id, update_source)
    assert root_update is None

    # check that head's content has been replaced by merged
    deleted_record = RecordMetadata.query.filter_by(id=update_id).one()
    latest_record = get_db_record('lit', 1)
    assert deleted_record.json['deleted'] is True

    # check deleted record is linked in the latest one
    deleted_rec_ref = {'$ref': 'http://localhost:5000/api/literature/2'}
    assert [deleted_rec_ref] == latest_record['deleted_records']

    # check the merged record is linked in the deleted one
    new_record_metadata = {'$ref': 'http://localhost:5000/api/literature/1'}
    assert new_record_metadata == deleted_record.json['new_record']

    del latest_record['deleted_records']
    assert latest_record == obj.data  # -> resulted merged record
def test_manual_merge_existing_records(workflow_app):
    """Verify the manual-merge workflow wires head and update correctly."""
    json_head = fake_record('This is the HEAD', 1)
    json_update = fake_record('While this is the update', 2)

    # this two fields will create a merging conflict
    json_head['core'] = True
    json_update['core'] = False

    head = InspireRecord.create_or_update(json_head, skip_files=False)
    head.commit()
    update = InspireRecord.create_or_update(json_update, skip_files=False)
    update.commit()

    head_id = head.id
    update_id = update.id

    obj_id = start_merger(
        head_id=1,
        update_id=2,
        current_user_id=1,
    )

    do_resolve_manual_merge_wf(workflow_app, obj_id)

    # retrieve it again, otherwise Detached Instance Error
    obj = workflow_object_class.get(obj_id)

    assert obj.status == ObjectStatus.COMPLETED
    assert obj.extra_data['approved'] is True
    assert obj.extra_data['auto-approved'] is False

    # no root present before
    last_root = read_wf_record_source(head_id, 'arxiv')
    assert last_root is None

    update_source = LiteratureReader(update).source
    root_update = read_wf_record_source(update_id, update_source)
    assert root_update is None

    # check that head's content has been replaced by merged
    deleted_record = RecordMetadata.query.filter_by(id=update_id).one()
    latest_record = get_db_record('lit', 1)
    assert deleted_record.json['deleted'] is True

    # check deleted record is linked in the latest one
    deleted_rec_ref = {'$ref': 'http://localhost:5000/api/literature/2'}
    assert [deleted_rec_ref] == latest_record['deleted_records']

    # check the merged record is linked in the deleted one
    new_record_metadata = {'$ref': 'http://localhost:5000/api/literature/1'}
    assert new_record_metadata == deleted_record.json['new_record']

    del latest_record['deleted_records']
    assert latest_record == obj.data  # -> resulted merged record
def resolve_conference_record_as_root(self, pub_info_item):
    """Return the dumped conference record referenced by ``pub_info_item``.

    Yields an empty dict when there is no conference reference or the
    referenced conference record carries no titles.
    """
    ref = pub_info_item.get('conference_record')
    if ref is None:
        return {}

    _, recid = get_pid_from_record_uri(ref.get('$ref'))
    conference = get_db_record('con', recid).dumps()

    # A conference without titles is not usable as a root.
    if conference.get('titles') is None:
        return {}
    return conference
def test_format_cv_latex_html(app):
    """CV-LaTeX-HTML formatting of a classic record matches the fixture."""
    record = get_db_record('literature', 4328)
    expected = (
        '<a href="localhost:5000/record/4328">Partial Symmetries of Weak Interactions.'
        '</a>,By S.L. Glashow.,<a href="http://dx.doi.org/10.1016/0029-5582(61)90469-2"'
        '>10.1016/0029-5582(61)90469-2</a>.,Nucl.Phys. 22 (1961) 579-588.,,')

    formatted = Cv_latex_html_text(record, 'cv_latex_html', ',').format()

    assert formatted == expected
def _set_transaction_user_id_for_last_record_update(control_number, user_id):
    """Attribute the record's most recent update to ``user_id``.

    Looks up the versioning transaction behind the record's latest
    revision and overwrites its ``user_id``. The caller is expected to
    commit the session.
    """
    record = get_db_record('lit', control_number)
    # NOTE(review): ``version_id`` appears to be 1-indexed while
    # ``revision_id`` is 0-indexed, hence the ``+ 1`` — confirm against the
    # versioning library's conventions.
    revision = record.model.versions.filter_by(version_id=(record.revision_id + 1)).one()
    transaction_id = revision.transaction_id
    Transaction = transaction_class(RecordMetadata)
    transaction = Transaction.query.filter(Transaction.id == transaction_id).one()
    transaction.user_id = user_id
    db.session.add(transaction)
def store_records(obj, eng):
    """Store the records involved in the manual merge.

    Performs the following steps:
        1. Updates the ``head`` so that it contains the result of the merge.
        2. Marks the ``update`` as merged with the ``head`` and deletes it.
        3. Populates the ``deleted_records`` and ``new_record`` keys in,
           respectively, ``head`` and ``update`` so that they contain a
           JSON reference to each other.

    Todo:
        The last step should be performed by the ``merge`` method itself.

    Args:
        obj: a workflow object.
        eng: a workflow engine.

    Returns:
        None
    """
    head_control_number = obj.extra_data['head_control_number']
    update_control_number = obj.extra_data['update_control_number']
    head = get_db_record('lit', head_control_number)
    update = get_db_record('lit', update_control_number)

    # 1. Updates the head so that it contains the result of the merge.
    head.clear()
    head.update(obj.data)

    # 2. Marks the update as merged with the head and deletes it.
    update.merge(head)
    update.delete()

    # 3. Populates the deleted_records and new_record keys.
    update['new_record'] = get_record_ref(head_control_number, 'literature')
    update_ref = get_record_ref(update_control_number, 'literature')
    head.setdefault('deleted_records', []).append(update_ref)

    # Commit both records, then flush everything in a single transaction.
    head.commit()
    update.commit()
    db.session.commit()
def update_record(obj, eng):
    """Write the workflow's payload back into the matching record."""
    recid = obj.data['control_number']
    literature_record = get_db_record('lit', recid)
    literature_record.update(obj.data)
    literature_record.commit()

    # Attribute the edit to the workflow's user, when one is set.
    if obj.id_user:
        _set_transaction_user_id_for_last_record_update(recid, obj.id_user)

    db.session.commit()
def test_convert_to_tei(cern_with_hal_id):
    """convert_to_tei output validates against the bundled AOFR XSD."""
    record = get_db_record('lit', 1472986)

    xsd = pkg_resources.resource_stream(
        __name__, os.path.join('fixtures', 'aofr.xsd'))
    validator = etree.XMLSchema(etree.parse(xsd))

    document = etree.fromstring(convert_to_tei(record).encode('utf8'))

    assert validator.validate(document)
def test_record_api_update(app, api_client, user_info, status):
    """Test that a record can be updated only from admin and cataloger users through the API."""
    if user_info:
        login_user_via_session(api_client, email=user_info['email'])

    payload = json.dumps(get_db_record('lit', 1497201))
    response = api_client.put('/literature/1497201',
                              data=payload,
                              content_type='application/json')

    assert response.status_code == status
def test_record_api_update_restricted_record(app, api_client, user_info, status):
    """Test that a restricted record can be updated only by users with the right permission."""
    if user_info:
        login_user_via_session(api_client, email=user_info['email'])

    payload = json.dumps(get_db_record('lit', 1090628))
    response = api_client.put('/literature/1090628',
                              data=payload,
                              content_type='application/json')

    assert response.status_code == status