def test_document_person_resolve_exception(es_clear, db, document_data_ref):
    """Test document person resolve."""
    # The person $ref in this fixture cannot be resolved, so the
    # creation (with commit and reindex) must raise.
    with pytest.raises(Exception):
        Document.create(
            data=document_data_ref,
            dbcommit=True,
            reindex=True,
            delete_pid=False,
        )
def test_document_create(db, document_data_tmp):
    """Test document creation.

    Creates a document, verifies pid assignment, the computed
    ``_text`` of the edition statement, record retrieval by pid and
    the pid fetcher, then checks that reusing an already-assigned pid
    is rejected.
    """
    ptty = Document.create(document_data_tmp, delete_pid=True)
    assert ptty == document_data_tmp
    assert ptty.get('pid') == '1'
    # The dumped edition statement exposes both the original script
    # and the transliterated default form.
    assert ptty.dumps()['editionStatement'][0]['_text'] == [{
        'language': 'chi-hani',
        'value': '第3版 / 曾令良主编'
    }, {
        'language': 'default',
        'value': 'Di 3 ban / Zeng Lingliang zhu bian'
    }]
    doc = Document.get_record_by_pid('1')
    assert doc == document_data_tmp

    fetched_pid = document_id_fetcher(ptty.id, ptty)
    assert fetched_pid.pid_value == '1'
    assert fetched_pid.pid_type == 'doc'

    # Creating a record with an already-used pid must fail.  The
    # return value is never reachable, so it is not bound to a name
    # (the original bound an unused `new_doc`).
    with pytest.raises(IlsRecordError.PidAlreadyUsed):
        Document.create(doc)
def test_document_es_mapping(es, db, org_martigny, document_data_ref,
                             item_lib_martigny, person):
    """Test document elasticsearch mapping."""
    search = DocumentsSearch()
    reference_mapping = get_mapping(search.Meta.index)
    assert reference_mapping
    # Index a copy so the shared fixture data stays untouched.
    Document.create(
        deepcopy(document_data_ref),
        dbcommit=True,
        reindex=True,
        delete_pid=True,
    )
    # Indexing a record must leave the mapping unchanged.
    assert reference_mapping == get_mapping(search.Meta.index)
def test_document_es_mapping(db, org_martigny, document_data_tmp,
                             item_lib_martigny):
    """Test document elasticsearch mapping."""
    search = DocumentsSearch()
    initial_mapping = get_mapping(search.Meta.index)
    assert initial_mapping
    Document.create(
        document_data_tmp,
        dbcommit=True,
        reindex=True,
        delete_pid=True,
    )
    # Indexing a record must not alter the index mapping.
    assert initial_mapping == get_mapping(search.Meta.index)
def test_document_es_mapping(es_clear, db, organisation, document_data_tmp,
                             item_on_loan):
    """Check that indexing a document leaves the ES mapping unchanged."""
    search = DocumentsSearch()
    initial_mapping = get_mapping(search.Meta.index)
    assert initial_mapping
    Document.create(
        document_data_tmp,
        dbcommit=True,
        reindex=True,
        delete_pid=True,
    )
    assert initial_mapping == get_mapping(search.Meta.index)
def test_document_contribution_resolve_exception(es_clear, db,
                                                 document_data_ref):
    """Test document contribution resolve.

    An unresolvable contribution ``$ref`` must make the record
    creation raise.
    """
    # Fix: the original assignment ended with a stray trailing comma,
    # which stored a one-element tuple containing the list instead of
    # the list itself.
    document_data_ref['contribution'] = [{
        '$ref': 'https://mef.rero.ch/api/rero/XXXXXX'
    }]
    with pytest.raises(Exception):
        Document.create(
            data=document_data_ref,
            delete_pid=False,
            dbcommit=True,
            reindex=True)
def test_document_es_mapping(mock_contributions_mef_get, es, db, org_martigny,
                             document_data_ref, item_lib_martigny,
                             contribution_person_response_data):
    """Test document elasticsearch mapping."""
    search = DocumentsSearch()
    reference_mapping = get_mapping(search.Meta.index)
    assert reference_mapping
    # Mock the MEF service so the contribution $ref can be resolved.
    mock_contributions_mef_get.return_value = mock_response(
        json_data=contribution_person_response_data)
    # Index a copy so the fixture data is left untouched.
    Document.create(
        deepcopy(document_data_ref),
        dbcommit=True,
        reindex=True,
        delete_pid=True,
    )
    # Indexing a record must leave the mapping unchanged.
    assert reference_mapping == get_mapping(search.Meta.index)
def test_replace_idby_subjects(mock_contributions_mef_get, app, document_data,
                               contribution_person_response_data):
    """Test replace identifiedBy in subjects.

    Drives ``replace_idby_subjects`` through its result categories by
    mocking different MEF responses and checking the returned count
    tuples.
    """
    # Nothing to replace before any document exists.
    assert replace_idby_subjects() == (0, 0, 0, 0, 0)
    doc = Document.create(data=document_data, dbcommit=True, reindex=True)
    DocumentsSearch.flush_and_refresh()

    replace = ReplaceMefIdentifiedBySubjects()
    replace.process()
    assert replace.counts_len == (0, 0, 0, 0, 1)

    # MEF record without idref/gnd identifiers.
    without_idref_gnd = deepcopy(contribution_person_response_data)
    without_idref_gnd['hits']['hits'][0]['metadata'].pop('idref')
    without_idref_gnd['hits']['hits'][0]['metadata'].pop('gnd')
    mock_contributions_mef_get.return_value = mock_response(
        json_data=without_idref_gnd)
    assert replace_idby_subjects() == (0, 0, 0, 1, 0)

    # MEF record flagged as deleted.
    without_idref_gnd = deepcopy(contribution_person_response_data)
    without_idref_gnd['hits']['hits'][0]['metadata']['deleted'] = '2022'
    mock_contributions_mef_get.return_value = mock_response(
        json_data=without_idref_gnd)
    assert replace_idby_subjects() == (0, 0, 1, 0, 0)

    # Complete MEF record: one subject gets replaced.
    mock_contributions_mef_get.return_value = mock_response(
        json_data=contribution_person_response_data)
    assert replace_idby_subjects() == (1, 0, 0, 0, 0)

    # clean up
    doc.delete(dbcommit=True, delindex=True, force=True)
    # Fix: the original loop variable `id` shadowed the builtin.
    for record_id in Contribution.get_all_ids():
        cont = Contribution.get_record_by_id(record_id)
        cont.delete(dbcommit=True, delindex=True, force=True)
def test_document_can_delete_harvested(app, ebook_1_data):
    """Test can delete for harvested records."""
    document = Document.create(ebook_1_data, delete_pid=True)
    assert document.harvested
    # A harvested record must not be deletable, and the reason must
    # be reported under 'others'/'harvested'.
    can_delete, reasons = document.can_delete
    assert not can_delete
    assert reasons['others']['harvested']
def doc_title_travailleuses(app):
    """Document with title with travailleuses."""
    record = {
        '$schema': 'https://ils.rero.ch/schemas/documents/'
                   'document-v0.0.1.json',
        'pid': 'doc_title_test2',
        'type': 'book',
        'language': [{'type': 'bf:Language', 'value': 'fre'}],
        'title': [{
            'type': 'bf:Title',
            'mainTitle': [
                {'value': "Les travailleuses partent à l'école"}
            ],
            'subtitle': [{'value': "lorsqu'un est bœuf ex aequo"}],
        }],
        'authors': [
            {'name': 'Müller, John', 'type': 'person'},
            {'name': 'Corminbœuf, Gruß', 'type': 'person'},
        ],
    }
    document = Document.create(
        data=record, delete_pid=False, dbcommit=True, reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return document
def document2_with_issn(app, journal2_data_with_issn):
    """Load document record."""
    document = Document.create(
        data=journal2_data_with_issn,
        delete_pid=False,
        dbcommit=True,
        reindex=True,
    )
    flush_index(DocumentsSearch.Meta.index)
    return document
def document_sion_items(app, document_sion_items_data):
    """Create document data for sion items."""
    document = Document.create(
        data=document_sion_items_data,
        delete_pid=False,
        dbcommit=True,
        reindex=True,
    )
    flush_index(DocumentsSearch.Meta.index)
    return document
def ebook_4(app, ebook_4_data):
    """Load ebook 4 record."""
    document = Document.create(
        data=ebook_4_data,
        delete_pid=False,
        dbcommit=True,
        reindex=True,
    )
    flush_index(DocumentsSearch.Meta.index)
    return document
def journal(app, journal_data):
    """Load journal record."""
    document = Document.create(
        data=journal_data,
        delete_pid=False,
        dbcommit=True,
        reindex=True,
    )
    flush_index(DocumentsSearch.Meta.index)
    return document
def document(app, document_data):
    """Load document record."""
    document = Document.create(
        data=document_data,
        delete_pid=False,
        dbcommit=True,
        reindex=True,
    )
    flush_index(DocumentsSearch.Meta.index)
    return document
def doc_title_travailleuses(app):
    """Document with title with travailleuses."""
    record = {
        '$schema': 'https://bib.rero.ch/schemas/documents/'
                   'document-v0.0.1.json',
        'pid': 'doc_title_test2',
        'type': [{
            'main_type': 'docmaintype_book',
            'subtype': 'docsubtype_other_book',
        }],
        'language': [{'type': 'bf:Language', 'value': 'fre'}],
        'title': [{
            'type': 'bf:Title',
            'mainTitle': [{
                'value': "Les travailleuses partent à l'école 100"
            }],
            'subtitle': [{'value': "lorsqu'un est bœuf ex aequo"}],
        }],
        'contribution': [{
            'agent': {
                'preferred_name': 'Müller, John',
                'type': 'bf:Person',
            },
            'role': ['aut'],
        }, {
            'agent': {
                'preferred_name': 'Corminbœuf, Gruß',
                'type': 'bf:Person',
            },
            'role': ['aut'],
        }],
        'provisionActivity': [{
            'type': 'bf:Publication',
            'startDate': 1818,
        }],
        'issuance': {
            'main_type': 'rdami:1001',
            'subtype': 'materialUnit',
        },
        'adminMetadata': {'encodingLevel': 'Minimal level'},
    }
    document = Document.create(
        data=record, delete_pid=False, dbcommit=True, reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return document
def ebook_3(app, ebook_3_data):
    """Load ebook 3 record."""
    # Drop the electronic locator before creating the record
    # (raises KeyError if absent, same as the original `del`).
    ebook_3_data.pop('electronicLocator')
    document = Document.create(
        data=ebook_3_data,
        delete_pid=False,
        dbcommit=True,
        reindex=True,
    )
    flush_index(DocumentsSearch.Meta.index)
    return document
def document2_ref(mock_persons_mef_get, app, document2_data_ref,
                  person2_response_data):
    """Load document with mef records reference."""
    # Mock the MEF person lookup so the $ref resolves.
    mock_persons_mef_get.return_value = mock_response(
        json_data=person2_response_data)
    document = Document.create(
        data=document2_data_ref,
        delete_pid=False,
        dbcommit=True,
        reindex=True,
    )
    flush_index(DocumentsSearch.Meta.index)
    return document
def test_document_create(db, document_data_tmp):
    """Test document creation."""
    created = Document.create(document_data_tmp, delete_pid=True)
    assert created == document_data_tmp
    assert created.get('pid') == '1'
    # The record must be retrievable by its pid.
    stored = Document.get_record_by_pid('1')
    assert stored == document_data_tmp
    # The pid fetcher must report the document pid type.
    fetched_pid = document_id_fetcher(stored.id, stored)
    assert fetched_pid.pid_value == '1'
    assert fetched_pid.pid_type == 'doc'
def document_ref(mock_resolver_get, mock_listener_get, app, document_data_ref,
                 mef_person_response_data):
    """Load a document whose MEF person $ref is resolved via mocks."""
    # Both the resolver and the listener hit MEF; mock both.
    mock_resolver_get.return_value = mock_response(
        json_data=mef_person_response_data)
    mock_listener_get.return_value = mock_response(
        json_data=mef_person_response_data)
    document = Document.create(
        data=document_data_ref,
        delete_pid=False,
        dbcommit=True,
        reindex=True,
    )
    flush_index(DocumentsSearch.Meta.index)
    return document
def doc_title_travailleurs(app):
    """Document with title with travailleur."""
    record = {
        '$schema': 'https://bib.rero.ch/schemas/documents/'
                   'document-v0.0.1.json',
        'pid': 'doc_title_test1',
        'type': [{
            'main_type': 'docmaintype_book',
            'subtype': 'docsubtype_other_book',
        }],
        'language': [{'type': 'bf:Language', 'value': 'fre'}],
        'title': [{
            'type': 'bf:Title',
            'mainTitle': [{
                'value': 'Les travailleurs assidus sont de retours'
            }],
            'subtitle': [{'value': 'les jeunes arrivent bientôt ?'}],
        }],
        'provisionActivity': [{
            'type': 'bf:Publication',
            'startDate': 1818,
        }],
        'issuance': {
            'main_type': 'rdami:1001',
            'subtype': 'materialUnit',
        },
        'adminMetadata': {'encodingLevel': 'Minimal level'},
    }
    document = Document.create(
        data=record, delete_pid=False, dbcommit=True, reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return document
def doc_title_travailleurs(app):
    """Document with title with travailleur."""
    record = {
        '$schema': 'https://ils.rero.ch/schemas/documents/'
                   'document-v0.0.1.json',
        'pid': 'doc_title_test1',
        'type': 'book',
        'language': [{'type': 'bf:Language', 'value': 'fre'}],
        'title': [{
            'type': 'bf:Title',
            'mainTitle': [{
                'value': 'Les travailleurs assidus sont de retours'
            }],
            'subtitle': [{'value': 'les jeunes arrivent bientôt ?'}],
        }],
    }
    document = Document.create(
        data=record, delete_pid=False, dbcommit=True, reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return document
def test_contribution_format(db, document_data):
    """Test contribution format."""
    doc = Document.create(document_data, delete_pid=True)
    expected_prefix = 'Nebehay, Christian Michael'
    formatted = contribution_format(doc.pid, 'en', 'global')
    assert formatted.startswith(expected_prefix)
def test_documents_import_bnf_ean(client):
    """Test document import from bnf.

    Exercises the imports search/record endpoints: EAN lookups,
    record creation from imported metadata, marc21 export, empty and
    filtered searches.
    """
    # Unknown EAN: no metadata returned.
    res = client.get(url_for('api_imports.imports_search', q='ean:any:123'))
    assert res.status_code == 200
    data = get_json(res)
    assert not data.get('metadata')

    # Known EAN: metadata is importable as a document.
    res = client.get(
        url_for('api_imports.imports_search', q='ean:any:9782070541270'))
    assert res.status_code == 200
    data = get_json(res).get('hits').get('hits')[0].get('metadata')
    assert data['pid'] == 'FRBNF370903960000006'
    assert Document.create(data)

    res = client.get(
        url_for('api_imports.imports_search', q='ean:any:9782072862014'))
    assert res.status_code == 200
    res_j = get_json(res)
    data = res_j.get('hits').get('hits')[0].get('metadata')
    data.update({
        "$schema": "https://ils.rero.ch/schemas/documents/document-v0.0.1.json"
    })
    assert Document.create(data)

    # The marc21 export link returns a record starting with a leader.
    marc21_link = res_j.get('hits').get('hits')[0].get('links').get('marc21')
    res = client.get(marc21_link)
    data = get_json(res)
    assert data[0][0] == 'leader'

    # Empty query: empty result set.
    res = client.get(url_for('api_imports.imports_search', q=''))
    assert res.status_code == 200
    assert get_json(res) == {
        'aggregations': {},
        'hits': {
            'hits': [],
            'remote_total': 0,
            'total': 0
        }
    }

    res = client.get(url_for('api_imports.imports_search', q='peter'))
    assert res.status_code == 200
    unfiltered_total = get_json(res)['hits']['remote_total']
    assert get_json(res)

    # Fix: the following three comparisons were bare expressions with
    # no effect; they are now asserted, checking that each filter
    # reduces the remote total.
    res = client.get(
        url_for('api_imports.imports_search', q='peter', year=2000,
                format='rerojson'))
    assert res.status_code == 200
    assert get_json(res)['hits']['remote_total'] < unfiltered_total

    res = client.get(
        url_for('api_imports.imports_search', q='peter', author='Peter Owen',
                format='rerojson'))
    assert res.status_code == 200
    assert get_json(res)['hits']['remote_total'] < unfiltered_total

    res = client.get(
        url_for('api_imports.imports_search', q='peter', type='book',
                format='rerojson'))
    assert res.status_code == 200
    assert get_json(res)['hits']['remote_total'] < unfiltered_total

    # Single record retrieval in the three supported formats.
    res = client.get(
        url_for('api_imports.imports_record', id='FRBNF370903960000006'))
    assert res.status_code == 200
    assert get_json(res).get('metadata', {}).get('identifiedBy')

    res = client.get(
        url_for('api_imports.imports_record', id='FRBNF370903960000006',
                format='rerojson'))
    assert res.status_code == 200
    assert get_json(res).get('metadata', {}).get('ui_title_text')

    res = client.get(
        url_for('api_imports.imports_record', id='FRBNF370903960000006',
                format='marc'))
    assert res.status_code == 200
    assert get_json(res)[1][1] == 'FRBNF370903960000006'
def test_monitoring(app, document_sion_items_data, script_info):
    """Test monitoring."""
    # Put the operation-log indices into a known state before counting.
    for index in OperationLog.get_indices():
        flush_index(index)
    # Expected textual output of Monitoring.__str__() and the CLI
    # commands (one row per pid type: DB-ES delta, DB count, ES count).
    cli_output = [
        'DB - ES type count index count',
        '----------------------------------------------------------------',
        ' 0 acac 0 acq_accounts 0',
        ' 0 acin 0 acq_invoices 0',
        ' 0 acol 0 acq_order_lines 0',
        ' 0 acor 0 acq_orders 0',
        ' 0 acre 0 acq_receipts 0',
        ' 0 acrl 0 acq_receipt_lines 0',
        ' 0 budg 0 budgets 0',
        ' 0 cipo 0 circ_policies 0',
        ' 0 coll 0 collections 0',
        ' 0 cont 0 contributions 0',
        ' 1 doc 1 documents 0',
        ' 0 hold 0 holdings 0',
        ' 0 illr 0 ill_requests 0',
        ' 0 item 0 items 0',
        ' 0 itty 0 item_types 0',
        ' 0 lib 0 libraries 0',
        ' loanid 0',
        ' 0 loc 0 locations 0',
        ' 0 lofi 0 local_fields 0',
        ' 0 notif 0 notifications 0',
        ' 0 oplg 0 operation_logs 1',
        ' 0 org 0 organisations 0',
        ' 0 ptre 0 patron_transaction_events 0',
        ' 0 ptrn 0 patrons 0',
        ' 0 pttr 0 patron_transactions 0',
        ' 0 ptty 0 patron_types 0',
        ' 0 stat 0 stats 0',
        ' 0 tmpl 0 templates 0',
        ' 0 vndr 0 vendors 0'
    ]
    mon = Monitoring(time_delta=0)
    # Unknown pid types are reported with an explicit message.
    assert mon.get_es_count('xxx') == 'No >>xxx<< in ES'
    assert mon.get_db_count('xxx') == 'No >>xxx<< in DB'
    # Create a document in the DB only (reindex=False) so DB and ES
    # counts intentionally diverge.
    doc = Document.create(
        data=document_sion_items_data,
        delete_pid=False,
        dbcommit=True,
        reindex=False
    )
    doc_pid = doc.pid
    assert mon.get_db_count('doc') == 1
    assert mon.get_es_count('documents') == 0
    # check() reports the DB-ES delta; missing() lists the pids absent
    # from ES.
    assert mon.check() == {'doc': {'db_es': 1}}
    assert mon.missing('doc') == {'DB': [], 'ES': ['doc3'],
                                  'ES duplicate': []}
    # not flushed by default
    flush_index('operation_logs')
    assert mon.info() == {
        'acac': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_accounts'},
        'acin': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_invoices'},
        'acol': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_order_lines'},
        'acor': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_orders'},
        'acre': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_receipts'},
        'acrl': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_receipt_lines'},
        'budg': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'budgets'},
        'cipo': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'circ_policies'},
        'coll': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'collections'},
        'cont': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'contributions'},
        'doc': {'db': 1, 'db-es': 1, 'es': 0, 'index': 'documents'},
        'hold': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'holdings'},
        'illr': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'ill_requests'},
        'item': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'items'},
        'itty': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'item_types'},
        'lib': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'libraries'},
        'loanid': {'db': 0},
        'loc': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'locations'},
        'lofi': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'local_fields'},
        'notif': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'notifications'},
        'oplg': {'db': 0, 'db-es': 0, 'es': 1, 'index': 'operation_logs'},
        'org': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'organisations'},
        'ptre': {'db': 0, 'db-es': 0, 'es': 0,
                 'index': 'patron_transaction_events'},
        'ptrn': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'patrons'},
        'pttr': {'db': 0, 'db-es': 0, 'es': 0,
                 'index': 'patron_transactions'},
        'ptty': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'patron_types'},
        'stat': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'stats'},
        'tmpl': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'templates'},
        'vndr': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'vendors'}
    }
    assert mon.__str__().split('\n') == cli_output + ['']
    # CLI: list pids missing in ES for the 'doc' type.
    runner = CliRunner()
    res = runner.invoke(es_db_missing_cli, ['doc', '-d', 0],
                        obj=script_info)
    assert res.output == f'ES missing doc: {doc.pid}\n'
    # CLI: counts table plus the missing-pids report.
    runner = CliRunner()
    res = runner.invoke(es_db_counts_cli, ['-m', '-d', 0], obj=script_info)
    assert res.output.split('\n') == cli_output + [
        f'ES missing doc: {doc.pid}',
        ''
    ]
    # we have to get the doc again because we lost the session after the use
    # of the CliRunner
    doc = Document.get_record_by_pid(doc_pid)
    doc.reindex()
    flush_index(DocumentsSearch.Meta.index)
    # After reindexing, DB and ES agree.
    assert mon.get_es_count('documents') == 1
    assert mon.check() == {}
    assert mon.missing('doc') == {'DB': [], 'ES': [], 'ES duplicate': []}
    # Deleting only from the DB makes ES the side with the extra record.
    doc.delete(dbcommit=True)
    for index in OperationLog.get_indices():
        flush_index(index)
    assert mon.get_db_count('doc') == 0
    assert mon.get_es_count('documents') == 1
    assert mon.check() == {'doc': {'db_es': -1}}
    assert mon.missing('doc') == {'DB': ['doc3'], 'ES': [],
                                  'ES duplicate': []}
def test_document_can_delete(app, document_data_tmp):
    """Test can delete."""
    document = Document.create(document_data_tmp, delete_pid=True)
    # No other records link to a freshly created document, so it is
    # deletable.
    assert document.get_links_to_me() == {}
    assert document.can_delete
def test_document_can_delete_harvested(app, ebook_1_data):
    """Test can delete for harvested records."""
    harvested_doc = Document.create(ebook_1_data, delete_pid=True)
    # Harvested records are never deletable.
    assert harvested_doc.harvested
    assert not harvested_doc.can_delete
def test_authors_format(db, document_data):
    """Test authors format."""
    doc = Document.create(document_data, delete_pid=True)
    assert authors_format(doc.pid, 'en', 'global') == 'Vincent, Sophie'
def test_contribution_format(db, document_data):
    """Test contribution format."""
    doc = Document.create(document_data, delete_pid=True)
    formatted = contribution_format(doc.pid, 'en', 'global')
    assert formatted.startswith('Vincent, Sophie')
def test_monitoring(app, document_sion_items_data, script_info):
    """Test monitoring."""
    # Expected textual output of Monitoring.__str__() and the CLI
    # commands (one row per pid type: DB-ES delta, DB count, ES count).
    cli_output = [
        'DB - ES type count index count',
        '----------------------------------------------------------------',
        ' 0 acac 0 acq_accounts 0',
        ' 0 acin 0 acq_invoices 0',
        ' 0 acol 0 acq_order_lines 0',
        ' 0 acor 0 acq_orders 0',
        ' 0 budg 0 budgets 0',
        ' 0 cipo 0 circ_policies 0',
        ' 1 doc 1 documents 0',
        ' 0 hold 0 holdings 0',
        ' 0 item 0 items 0',
        ' 0 itty 0 item_types 0',
        ' 0 lib 0 libraries 0',
        ' loanid 0',
        ' 0 loc 0 locations 0',
        ' 0 notif 0 notifications 0',
        ' 0 org 0 organisations 0',
        ' 0 pers 0 persons 0',
        ' 0 ptre 0 patron_transaction_events 0',
        ' 0 ptrn 0 patrons 0',
        ' 0 pttr 0 patron_transactions 0',
        ' 0 ptty 0 patron_types 0',
        ' 0 vndr 0 vendors 0'
    ]
    mon = Monitoring()
    # Unknown pid types are reported with an explicit message.
    assert mon.get_es_count('xxx') == 'No >>xxx<< in ES'
    assert mon.get_db_count('xxx') == 'No >>xxx<< in DB'
    # Create a document in the DB only (reindex=False) so DB and ES
    # counts intentionally diverge.
    doc = Document.create(data=document_sion_items_data, delete_pid=False,
                          dbcommit=True, reindex=False)
    doc_pid = doc.pid
    assert mon.get_db_count('doc') == 1
    assert mon.get_es_count('documents') == 0
    # check() reports the DB-ES delta; missing() lists the pids absent
    # from ES.
    assert mon.check() == {'doc': 1}
    assert mon.missing('doc') == {'DB': [], 'ES': ['doc3'],
                                  'ES duplicate': []}
    assert mon.info() == {
        'acac': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_accounts'
        },
        'acin': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_invoices'
        },
        'acol': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_order_lines'
        },
        'acor': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_orders'
        },
        'budg': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'budgets'
        },
        'cipo': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'circ_policies'
        },
        'doc': {
            'db': 1, 'db-es': 1, 'es': 0, 'index': 'documents'
        },
        'hold': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'holdings'
        },
        'item': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'items'
        },
        'itty': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'item_types'
        },
        'lib': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'libraries'
        },
        'loanid': {
            'db': 0
        },
        'loc': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'locations'
        },
        'notif': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'notifications'
        },
        'org': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'organisations'
        },
        'pers': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'persons'
        },
        'ptre': {
            'db': 0, 'db-es': 0, 'es': 0,
            'index': 'patron_transaction_events'
        },
        'ptrn': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'patrons'
        },
        'pttr': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'patron_transactions'
        },
        'ptty': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'patron_types'
        },
        'vndr': {
            'db': 0, 'db-es': 0, 'es': 0, 'index': 'vendors'
        }
    }
    assert mon.__str__().split('\n') == cli_output + ['']
    # CLI: list pids missing in ES for the 'doc' type.
    runner = CliRunner()
    res = runner.invoke(es_db_missing_cli, ['doc'], obj=script_info)
    assert res.output == 'doc: pids missing in ES:\ndoc3\n'
    # CLI: counts table plus the missing-pids report.
    runner = CliRunner()
    res = runner.invoke(es_db_counts_cli, ['-m'], obj=script_info)
    assert res.output.split(
        '\n') == cli_output + ['doc: pids missing in ES:', 'doc3', '']
    # we have to get the doc again because we lost the session after the use
    # of the CliRunner
    doc = Document.get_record_by_pid(doc_pid)
    doc.reindex()
    flush_index(DocumentsSearch.Meta.index)
    # After reindexing, DB and ES agree.
    assert mon.get_es_count('documents') == 1
    assert mon.check() == {}
    assert mon.missing('doc') == {'DB': [], 'ES': [], 'ES duplicate': []}
    # Deleting only from the DB makes ES the side with the extra record.
    doc.delete(dbcommit=True)
    assert mon.get_db_count('doc') == 0
    assert mon.get_es_count('documents') == 1
    assert mon.check() == {'doc': -1}
    assert mon.missing('doc') == {'DB': [], 'ES': [], 'ES duplicate': []}