def test_contribution_create(app, contribution_person_data_tmp, caplog):
    """Test MEF contribution creation."""
    # no record with pid '1' exists yet
    assert not Contribution.get_record_by_pid('1')
    record = Contribution.create(
        contribution_person_data_tmp, dbcommit=True, delete_pid=True)
    assert record == contribution_person_data_tmp
    assert record.get('pid') == '1'
    # the record can be re-read from the database
    record = Contribution.get_record_by_pid('1')
    assert record == contribution_person_data_tmp
    # the fetcher resolves the persistent identifier
    fetched = contribution_id_fetcher(record.id, record)
    assert fetched.pid_value == '1'
    assert fetched.pid_type == 'cont'
    # a second creation receives pid '2'
    contribution_person_data_tmp['viaf_pid'] = '1234'
    record = Contribution.create(
        contribution_person_data_tmp, dbcommit=True, delete_pid=True)
    record = Contribution.get_record_by_pid('2')
    assert record.get('viaf_pid') == '1234'
    assert record.organisation_pids == []
    record.delete_from_index()
    # test the messages from current_app.logger
    assert caplog.records[0].name == 'elasticsearch'
    assert caplog.record_tuples[1] == (
        'flask.app', 30, 'Can not delete from index Contribution: 2')
def test_replace_idby_subjects(mock_contributions_mef_get, app,
                               document_data,
                               contribution_person_response_data):
    """Test replace identifiedBy in subjects."""
    assert replace_idby_subjects() == (0, 0, 0, 0, 0)
    document = Document.create(data=document_data, dbcommit=True,
                               reindex=True)
    DocumentsSearch.flush_and_refresh()
    replace = ReplaceMefIdentifiedBySubjects()
    replace.process()
    assert replace.counts_len == (0, 0, 0, 0, 1)
    # MEF response stripped of its idref and gnd identifiers
    response_data = deepcopy(contribution_person_response_data)
    metadata = response_data['hits']['hits'][0]['metadata']
    metadata.pop('idref')
    metadata.pop('gnd')
    mock_contributions_mef_get.return_value = mock_response(
        json_data=response_data)
    assert replace_idby_subjects() == (0, 0, 0, 1, 0)
    # MEF response flagged as deleted
    response_data = deepcopy(contribution_person_response_data)
    response_data['hits']['hits'][0]['metadata']['deleted'] = '2022'
    mock_contributions_mef_get.return_value = mock_response(
        json_data=response_data)
    assert replace_idby_subjects() == (0, 0, 1, 0, 0)
    # complete MEF response: one subject gets replaced
    mock_contributions_mef_get.return_value = mock_response(
        json_data=contribution_person_response_data)
    assert replace_idby_subjects() == (1, 0, 0, 0, 0)
    # clean up
    document.delete(dbcommit=True, delindex=True, force=True)
    for record_id in Contribution.get_all_ids():
        contribution = Contribution.get_record_by_id(record_id)
        contribution.delete(dbcommit=True, delindex=True, force=True)
def test_contribution_es_mapping(es_clear, db, contribution_person_data_tmp):
    """Test mef elasticsearch mapping."""
    search = ContributionsSearch()
    initial_mapping = get_mapping(search.Meta.index)
    assert initial_mapping
    # indexing a record must not alter the existing mapping
    Contribution.create(contribution_person_data_tmp, dbcommit=True,
                        reindex=True, delete_pid=True)
    assert initial_mapping == get_mapping(search.Meta.index)
def contribution_person(app, contribution_person_data):
    """Create mef contribution record."""
    record = Contribution.create(data=contribution_person_data,
                                 delete_pid=False, dbcommit=True,
                                 reindex=True)
    flush_index(ContributionsSearch.Meta.index)
    return record
def render(self, language=None, **kwargs) -> str:
    """Render the subject as a string.

    :param language: preferred language for the subject.
    :return the string representation of this subject.
    """
    contribution, _ = Contribution.get_record_by_ref(self.reference)
    return contribution.get_authorized_access_point(language=language)
def test_contribution_mef_create(mock_contributions_mef_get, app,
                                 contribution_person_data_tmp,
                                 contribution_person_response_data):
    """Test MEF contribution creation."""
    initial_count = Contribution.count()
    mock_contributions_mef_get.return_value = mock_response(
        json_data=contribution_person_response_data)
    # resolving an unknown $ref fetches the record online and creates it
    record, online = Contribution.get_record_by_ref(
        'https://mef.rero.ch/api/rero/A017671081')
    flush_index(ContributionsSearch.Meta.index)
    assert record == contribution_person_data_tmp
    assert online
    assert Contribution.count() == initial_count + 1
    # drop the idref source, then resolve through the gnd identifier:
    # the record now comes from the database, not from MEF
    record.pop('idref')
    record['sources'] = ['gnd']
    record.replace(record, dbcommit=True)
    db_record, online = Contribution.get_record_by_ref(
        'https://mef.rero.ch/api/gnd/13343771X')
    assert db_record['sources'] == ['gnd']
    assert not online
def test_monitoring_check_es_db_counts(app, client, contribution_person_data,
                                       system_librarian_martigny):
    """Test monitoring check_es_db_counts."""
    res = client.get(url_for('api_monitoring.check_es_db_counts', delay=0))
    assert res.status_code == 200
    assert get_json(res) == {'data': {'status': 'green'}}
    # create a DB record without indexing it: DB and ES counts diverge
    Contribution.create(data=contribution_person_data, delete_pid=False,
                        dbcommit=True, reindex=False)
    flush_index(ContributionsSearch.Meta.index)
    res = client.get(url_for('api_monitoring.check_es_db_counts', delay=0))
    assert res.status_code == 200
    assert get_json(res) == {
        'data': {'status': 'red'},
        'errors': [{
            'code': 'DB_ES_COUNTER_MISSMATCH',
            'details': 'There are 1 items from cont missing in ES.',
            'id': 'DB_ES_COUNTER_MISSMATCH',
            'links': {
                'about': 'http://localhost/monitoring/check_es_db_counts',
                'cont': 'http://localhost/monitoring/missing_pids/cont'
            },
            'title': "DB items counts don't match ES items count."
        }]
    }
    # this view is only accessible by monitoring
    res = client.get(url_for('api_monitoring.missing_pids', doc_type='cont'))
    assert res.status_code == 401
    login_user_via_session(client, system_librarian_martigny.user)
    res = client.get(url_for('api_monitoring.missing_pids', doc_type='cont'))
    assert res.status_code == 403
    # give user superuser admin rights
    db.session.add(
        ActionUsers.allow(superuser_access,
                          user=system_librarian_martigny.user))
    db.session.commit()
    res = client.get(
        url_for('api_monitoring.missing_pids', doc_type='cont', delay=0))
    assert res.status_code == 200
    assert get_json(res) == {
        'data': {
            'DB': [],
            'ES': ['http://localhost/contributions/cont_pers'],
            'ES duplicate': []
        }
    }
def test_contribution_create_delete(app, contribution_person_data_tmp,
                                    capsys):
    """Test mef contributions creation and deletion."""
    created = create_mef_records([contribution_person_data_tmp], verbose=True)
    assert created == 1
    out, err = capsys.readouterr()
    record = Contribution.get_record_by_pid('cont_pers')
    assert out.strip() == f'record uuid: {record.id}'
    deleted = delete_records([record], verbose=True)
    assert deleted == 1
    out, err = capsys.readouterr()
    assert out.strip() == 'records deleted: 1'
def test_publish_api_harvested_records(app, contribution_person_data_tmp,
                                       capsys):
    """Test mef contribution publish api harvested records."""
    publish_api_harvested_records(
        sender='test', name='mef',
        records=[contribution_person_data_tmp], url='http://test.com')
    captured = capsys.readouterr()
    assert captured.out.strip() == (
        'mef harvester: received 1 records: '
        'https://ils.rero.ch/schemas/contributions/contribution-v0.0.1.json')
    assert Contribution.count() == 1
def contribution_proxy(viewcode, pid, contribution_type):
    """Proxy for contributions.

    :param viewcode: viewcode of html request
    :param pid: pid of contribution
    :param contribution_type: type of contribution
    :returns: contribution template
    """
    contribution = Contribution.get_record_by_pid(pid)
    # Abort with 404 when the record is missing OR has the wrong type.
    # Previously a missing record fell through to PersistentIdentifier.get,
    # which raises PIDDoesNotExistError and surfaces as an HTTP 500
    # instead of a clean "not found" response.
    if not contribution or contribution['type'] != contribution_type:
        abort(404, 'Record not found')
    persistent_id = PersistentIdentifier.get('cont', pid)
    return contribution_view_method(
        pid=persistent_id,
        record=contribution,
        template='rero_ils/detailed_view_contribution.html',
        viewcode=viewcode
    )
def do(self, blob, language='en', ignore_missing=True,
       exception_handlers=None, with_holdings_items=False,
       organisation_pids=None, library_pids=None, location_pids=None):
    """Translate blob values and instantiate new model instance.

    Raises ``MissingRule`` when no rule matched and ``ignore_missing``
    is ``False``.

    :param blob: ``dict``-like object on which the matching rules are
        going to be applied.
    :param ignore_missing: Set to ``False`` if you prefer to raise
        an exception ``MissingRule`` for the first key that it is not
        matching any rule.
    :param exception_handlers: Give custom exception handlers to take
        care of non-standard codes that are installation specific.
    :param with_holdings_items: Add holding, item information in field 949
        to the result (attention time consuming).
    :param organisation_pids: Which organisations items to add.
    :param library_pids: Which libraries items to add.
    :param location_pids: Which locations items to add.
    :param language: Language to use.
    """
    # TODO: real leader
    self.language = language
    blob['leader'] = LEADER
    # create fixed_length_data_elements for 008
    # TODO: add 008/00-05 Date entered on file
    # Template for the 008 fixed field; date/language slices below are
    # spliced into this string at fixed offsets.
    fixed_data = '000000|||||||||xx#|||||||||||||||||||||c'
    provision_activity = blob.get('provisionActivity', [])
    for p_activity in provision_activity:
        # only the first bf:Publication activity contributes dates
        if p_activity.get('type') == 'bf:Publication':
            end_date = str(p_activity.get('endDate', ''))
            if end_date:
                # 008/11-14: end date
                fixed_data = fixed_data[:11] + end_date + fixed_data[15:]
            start_date = str(p_activity.get('startDate', ''))
            if start_date:
                # 008/07-10: start date
                fixed_data = fixed_data[:7] + start_date + fixed_data[11:]
            break
    # NOTE: reuses/shadows the `language` parameter from here on
    language = utils.force_list(blob.get('language'))
    if language:
        language = language[0].get('value')
        # 008/35-37: language code
        fixed_data = fixed_data[:35] + language + fixed_data[38:]
    blob['fixed_length_data_elements'] = fixed_data
    # Add responsibilityStatement to title
    if blob.get('title'):
        blob['title_responsibility'] = {
            'titles': blob.get('title', {}),
            'responsibility': ' ; '.join(
                create_title_responsibilites(
                    blob.get('responsibilityStatement', [])))
        }
    # Fix ContributionsSearch
    # Resolve contribution $refs and order their source labels by the
    # configured per-language preference.
    order = current_app.config.get('RERO_ILS_CONTRIBUTIONS_LABEL_ORDER', [])
    # NOTE(review): if the config key is absent, the [] default has no
    # .get and order['fallback'] would raise — presumably the config is
    # always defined; verify.
    source_order = order.get(self.language, order.get(order['fallback'], []))
    contributions = blob.get('contribution', [])
    for contribution in contributions:
        ref = contribution['agent'].get('$ref')
        if ref:
            agent, _ = Contribution.get_record_by_ref(ref)
            if agent:
                # replace the $ref stub with the resolved agent record
                contribution['agent'] = agent
        replace_contribution_sources(contribution=contribution,
                                     source_order=source_order)
    if with_holdings_items:
        # add holdings/items information via get_holdings_items
        blob['holdings_items'] = get_holdings_items(
            document_pid=blob.get('pid'),
            organisation_pids=organisation_pids,
            library_pids=library_pids,
            location_pids=location_pids)
    # Physical Description
    physical_description = {}
    extent = blob.get('extent')
    durations = ', '.join(blob.get('duration', []))
    if extent:
        if durations:
            # avoid appending the duration twice when the extent
            # already contains it in parentheses
            if f'({durations})' in extent:
                physical_description['extent'] = extent
            else:
                physical_description['extent'] = f'{extent} ({durations})'
        else:
            physical_description['extent'] = extent
    note = blob.get('note', [])
    other_physical_details = []
    for value in note:
        if value['noteType'] == 'otherPhysicalDetails':
            other_physical_details.append(value['label'])
    # fall back to production/illustration/color data only when no
    # explicit otherPhysicalDetails notes exist
    if not other_physical_details:
        for value in blob.get('productionMethod', []):
            other_physical_details.append(translate(value))
        for value in blob.get('illustrativeContent', []):
            other_physical_details.append(value)
        for value in blob.get('colorContent', []):
            other_physical_details.append(translate(value))
    if other_physical_details:
        physical_description['other_physical_details'] = \
            ' ; '.join(other_physical_details)
    accompanying_material = ' ; '.join([
        v.get('label') for v in note
        if v['noteType'] == 'accompanyingMaterial'
    ])
    if accompanying_material:
        physical_description['accompanying_material'] = \
            accompanying_material
    # Merge dimensions with book formats: a dimension matching a format
    # (case-insensitively) is replaced by the format's original casing;
    # unmatched formats are appended afterwards.
    dimensions = blob.get('dimensions', [])
    book_formats = blob.get('bookFormat', [])
    upper_book_formats = [v.upper() for v in book_formats]
    new_dimensions = []
    for dimension in dimensions:
        try:
            index = upper_book_formats.index(dimension.upper())
            new_dimensions.append(book_formats[index])
            # NOTE(review): book_formats is mutated here but
            # upper_book_formats is not, so indices can desync when
            # several dimensions match formats — verify intent.
            del book_formats[index]
        except ValueError:
            new_dimensions.append(dimension)
    for book_format in book_formats:
        new_dimensions.append(book_format)
    if new_dimensions:
        physical_description['dimensions'] = ' ; '.join(new_dimensions)
    if physical_description:
        blob['physical_description'] = physical_description
    # Add order
    # Count the repetitions of each key so the __order__ list repeats
    # field names once per occurrence, following the ORDER template.
    keys = {}
    for key, value in blob.items():
        count = 1
        if isinstance(value, (list, set, tuple)):
            count = len(value)
        keys.setdefault(key, count - 1)
        keys[key] += 1
    order = []
    for key in ORDER:
        for count in range(0, keys.get(key, 0)):
            order.append(key)
    blob['__order__'] = order
    result = super().do(blob, ignore_missing=ignore_missing,
                        exception_handlers=exception_handlers)
    return result