Example #1
def test_document_es_mapping(mock_contributions_mef_get, es, db, org_martigny,
                             document_data_ref, item_lib_martigny,
                             contribution_person_response_data):
    """Test document elasticsearch mapping."""
    search = DocumentsSearch()
    mapping = get_mapping(search.Meta.index)
    assert mapping
    data = deepcopy(document_data_ref)
    mock_contributions_mef_get.return_value = mock_response(
        json_data=contribution_person_response_data)
    Document.create(data, dbcommit=True, reindex=True, delete_pid=True)
    assert mapping == get_mapping(search.Meta.index)
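Examples #1 and #3 follow the same pattern: snapshot the Elasticsearch mapping, index one record, then assert that dynamic mapping did not alter it. A minimal sketch of that pattern as a reusable helper, assuming only the get_mapping utility already used above (the helper name is illustrative, not part of the project):

def assert_mapping_unchanged(search_class, create_record):
    """Create one record and check that the ES mapping stayed stable.

    :param search_class: a search class exposing Meta.index.
    :param create_record: callable creating and reindexing a single record.
    """
    mapping = get_mapping(search_class.Meta.index)
    assert mapping
    create_record()
    assert mapping == get_mapping(search_class.Meta.index)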
Example #2
def test_document_contribution_resolve_exception(es_clear, db,
                                                 document_data_ref):
    """Test document contribution resolve."""
    document_data_ref['contribution'] = [{
        '$ref': 'https://mef.rero.ch/api/rero/XXXXXX'
    }]
    with pytest.raises(Exception):
        Document.create(data=document_data_ref,
                        delete_pid=False,
                        dbcommit=True,
                        reindex=True)
Example #3
def test_document_es_mapping(es_clear, db, organisation,
                             document_data_tmp, item_on_loan):
    """."""
    search = DocumentsSearch()
    mapping = get_mapping(search.Meta.index)
    assert mapping
    Document.create(
        document_data_tmp,
        dbcommit=True,
        reindex=True,
        delete_pid=True
    )
    assert mapping == get_mapping(search.Meta.index)
Example #4
def test_create_holdings_with_pattern(client, librarian_martigny_no_email,
                                      loc_public_martigny, journal,
                                      item_type_standard_martigny, document,
                                      json_header, holding_lib_martigny_data,
                                      pattern_yearly_one_level_data,
                                      holding_lib_martigny_w_patterns_data):
    """Test create holding type serial with patterns."""
    login_user_via_session(client, librarian_martigny_no_email.user)
    post_entrypoint = 'invenio_records_rest.hold_list'

    del holding_lib_martigny_data['pid']
    holding_lib_martigny_data['holdings_type'] = 'serial'
    res, _ = postdata(client, post_entrypoint, holding_lib_martigny_data)
    assert res.status_code == 403

    holding_lib_martigny_data['patterns'] = \
        pattern_yearly_one_level_data['patterns']

    # test will fail when creating a serial holding for a standard document.
    res, _ = postdata(client, post_entrypoint, holding_lib_martigny_data)
    assert res.status_code == 403

    # test will fail when creating a standard holding for a journal document.
    holding_lib_martigny_w_patterns_data['holdings_type'] = 'standard'
    del holding_lib_martigny_w_patterns_data['patterns']
    with pytest.raises(RecordValidationError):
        Holding.create(data=holding_lib_martigny_w_patterns_data,
                       delete_pid=True,
                       dbcommit=True,
                       reindex=True)

    journal_pids = list(Document.get_all_serial_pids())
    assert journal_pids == [journal.pid]
Example #5
def test_replace_idby_subjects(mock_contributions_mef_get, app, document_data,
                               contribution_person_response_data):
    """Test replace identifiedBy in subjects."""
    assert replace_idby_subjects() == (0, 0, 0, 0, 0)

    doc = Document.create(data=document_data, dbcommit=True, reindex=True)
    DocumentsSearch.flush_and_refresh()
    replace = ReplaceMefIdentifiedBySubjects()
    replace.process()
    assert replace.counts_len == (0, 0, 0, 0, 1)

    without_idref_gnd = deepcopy(contribution_person_response_data)
    without_idref_gnd['hits']['hits'][0]['metadata'].pop('idref')
    without_idref_gnd['hits']['hits'][0]['metadata'].pop('gnd')
    mock_contributions_mef_get.return_value = mock_response(
        json_data=without_idref_gnd)
    assert replace_idby_subjects() == (0, 0, 0, 1, 0)

    without_idref_gnd = deepcopy(contribution_person_response_data)
    without_idref_gnd['hits']['hits'][0]['metadata']['deleted'] = '2022'
    mock_contributions_mef_get.return_value = mock_response(
        json_data=without_idref_gnd)
    assert replace_idby_subjects() == (0, 0, 1, 0, 0)

    mock_contributions_mef_get.return_value = mock_response(
        json_data=contribution_person_response_data)
    assert replace_idby_subjects() == (1, 0, 0, 0, 0)

    # clean up
    doc.delete(dbcommit=True, delindex=True, force=True)
    for id in Contribution.get_all_ids():
        cont = Contribution.get_record_by_id(id)
        cont.delete(dbcommit=True, delindex=True, force=True)
Example #6
    def post_process(self, metadata):
        """Post process the data.

        Add extra data such as the title statement.

        :param metadata: dictionary version of a record
        :return: the modified dictionary
        """
        metadata = Document.post_process(metadata)

        titles = metadata.get('title', [])
        text_title = title_format_text_head(titles, with_subtitle=False)
        if text_title:
            metadata['ui_title_text'] = text_title
        responsibility = metadata.get('responsibilityStatement', [])
        text_title = title_format_text_head(titles,
                                            responsibility,
                                            with_subtitle=False)
        if text_title:
            metadata['ui_title_text_responsibility'] = text_title
        contributions = metadata.get('contribution', [])
        new_contributions = []
        for contribution in contributions:
            agent = contribution['agent']
            agent_type = agent['type']
            agent_data = JsonRef.replace_refs(agent,
                                              loader=None).get('metadata')
            if agent_data:
                agent_data.pop('$schema', None)
                agent = agent_data
                agent['type'] = agent_type
            new_contributions.append({'agent': agent})
        if new_contributions:
            metadata['contribution'] = create_contributions(new_contributions)
        return metadata
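A hedged sketch of what post_process() above yields for a minimal record shaped like the title fixtures further down (Examples #15 and #22); the serializer instance and the resulting keys are assumptions, not taken from the project:

# Illustrative input only: a stripped-down record with a single title.
metadata = {
    'title': [{
        'type': 'bf:Title',
        'mainTitle': [{'value': "Les travailleuses partent à l'école"}],
        'subtitle': [{'value': "lorsqu'un est bœuf ex aequo"}]
    }],
    'contribution': []
}
# `serializer` is assumed to be an instance of the class defining post_process.
processed = serializer.post_process(metadata)
assert processed.get('ui_title_text')  # built via title_format_text_head()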
Example #7
def test_document_can_delete_harvested(app, ebook_1_data):
    """Test can delete for harvested records."""
    document = Document.create(ebook_1_data, delete_pid=True)
    can, reasons = document.can_delete
    assert document.harvested
    assert not can
    assert reasons['others']['harvested']
Example #8
def marc21_to_part_of(self, key, value):
    """Get part_of."""
    part_of = {}
    subfield_x = not_repetitive(unimarc.bib_id,
                                'unimarc',
                                key,
                                value,
                                'x',
                                default='').strip()
    linked_pid = None
    if subfield_x:
        for pid in Document.get_document_pids_by_issn(subfield_x):
            linked_pid = pid
            break
    if linked_pid:
        part_of['document'] = {
            '$ref': f'https://ils.rero.ch/api/documents/{linked_pid}'
        }
        subfield_v = not_repetitive(unimarc.bib_id,
                                    'unimarc',
                                    key,
                                    value,
                                    'v',
                                    default='').strip()
        if subfield_v:
            part_of['numbering'] = subfield_v
        self['partOf'] = self.get('partOf', [])
        self['partOf'].append(part_of)
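For illustration, the shape of the partOf entry appended above when subfield $x contains an ISSN that resolves to an existing document; the pid and the numbering value are placeholders, not real data:

# Illustrative data only: pid '2' and 'vol. 5' are made up.
expected_part_of = {
    'document': {'$ref': 'https://ils.rero.ch/api/documents/2'},
    'numbering': 'vol. 5'  # present only when subfield $v is non-empty
}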
Example #9
def doc_title_travailleuses(app):
    """Document with title with travailleuses."""
    data = {
        '$schema': 'https://ils.rero.ch/schemas/documents/'
                   'document-v0.0.1.json',
        'pid': 'doc_title_test2', 'type': 'book',
        'language': [{'type': 'bf:Language', 'value': 'fre'}],
        'title': [{
            'type': 'bf:Title',
            'mainTitle': [{
                'value': "Les travailleuses partent à l'école"
            }],
            'subtitle': [{'value': "lorsqu'un est bœuf ex aequo"}]
        }],
        'authors': [{
            'name': 'Müller, John', 'type': 'person'
        }, {
            'name': 'Corminbœuf, Gruß', 'type': 'person'
        }],
    }
    doc = Document.create(
        data=data,
        delete_pid=False,
        dbcommit=True,
        reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return doc
Example #10
def document(app, document_data):
    """Load document record."""
    doc = Document.create(data=document_data,
                          delete_pid=False,
                          dbcommit=True,
                          reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return doc
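The fixture examples above and below all follow the same create, commit, reindex and flush pattern. As a sketch, a test consuming such a fixture could check both the database and the index; the test itself is illustrative and relies only on APIs shown elsewhere in these examples:

def test_document_fixture_is_indexed(document):
    """The fixture record is stored and searchable (illustrative test)."""
    assert Document.get_record_by_pid(document.pid) == document
    assert DocumentsSearch().filter('term', pid=document.pid).count() == 1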
Example #11
def journal(app, journal_data):
    """Load journal record."""
    doc = Document.create(data=journal_data,
                          delete_pid=False,
                          dbcommit=True,
                          reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return doc
Example #12
def document_sion_items(app, document_sion_items_data):
    """Create document data for sion items."""
    doc = Document.create(data=document_sion_items_data,
                          delete_pid=False,
                          dbcommit=True,
                          reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return doc
Example #13
def document2_with_issn(app, journal2_data_with_issn):
    """Load document record."""
    doc = Document.create(data=journal2_data_with_issn,
                          delete_pid=False,
                          dbcommit=True,
                          reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return doc
Example #14
def ebook_4(app, ebook_4_data):
    """Load ebook 4 record."""
    doc = Document.create(data=ebook_4_data,
                          delete_pid=False,
                          dbcommit=True,
                          reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return doc
Example #15
def doc_title_travailleuses(app):
    """Document with title with travailleuses."""
    data = {
        '$schema': 'https://bib.rero.ch/schemas/documents/'
                   'document-v0.0.1.json',
        'pid': 'doc_title_test2',
        'type': [{
            "main_type": "docmaintype_book",
            "subtype": "docsubtype_other_book"
        }],
        'language': [{
            'type': 'bf:Language',
            'value': 'fre'
        }],
        'title': [{
            'type': 'bf:Title',
            'mainTitle': [{
                'value': "Les travailleuses partent à l'école 100"
            }],
            'subtitle': [{
                'value': "lorsqu'un est bœuf ex aequo"
            }]
        }],
        'contribution': [{
            'agent': {
                'preferred_name': 'Müller, John',
                'type': 'bf:Person'
            },
            'role': ['aut']
        }, {
            'agent': {
                'preferred_name': 'Corminbœuf, Gruß',
                'type': 'bf:Person'
            },
            'role': ['aut']
        }],
        "provisionActivity": [{
            "type": "bf:Publication",
            "startDate": 1818
        }],
        'issuance': {
            'main_type': 'rdami:1001',
            'subtype': 'materialUnit'
        },
        'adminMetadata': {
            'encodingLevel': 'Minimal level'
        }
    }
    doc = Document.create(data=data,
                          delete_pid=False,
                          dbcommit=True,
                          reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return doc
Example #16
def ebook_3(app, ebook_3_data):
    """Load ebook 3 record."""
    del ebook_3_data['electronicLocator']
    doc = Document.create(data=ebook_3_data,
                          delete_pid=False,
                          dbcommit=True,
                          reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return doc
Example #17
def test_document_create(db, document_data_tmp):
    """Test document creation."""
    ptty = Document.create(document_data_tmp, delete_pid=True)
    assert ptty == document_data_tmp
    assert ptty.get('pid') == '1'
    assert ptty.dumps()['editionStatement'][0]['_text'] == [
        {'language': 'chi-hani', 'value': '第3版 / 曾令良主编'},
        {'language': 'default', 'value': 'Di 3 ban / Zeng Lingliang zhu bian'}
    ]
    doc = Document.get_record_by_pid('1')
    assert doc == document_data_tmp

    fetched_pid = document_id_fetcher(ptty.id, ptty)
    assert fetched_pid.pid_value == '1'
    assert fetched_pid.pid_type == 'doc'

    with pytest.raises(IlsRecordError.PidAlradyUsed):
        new_doc = Document.create(doc)
Example #18
def document2_ref(mock_persons_mef_get, app, document2_data_ref,
                  person2_response_data):
    """Load document with mef records reference."""
    mock_persons_mef_get.return_value = mock_response(
        json_data=person2_response_data)
    doc = Document.create(data=document2_data_ref,
                          delete_pid=False,
                          dbcommit=True,
                          reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return doc
Example #19
def document_ref(mock_resolver_get, mock_listener_get,
                 app, document_data_ref, mef_person_response_data):
    """."""
    mock_resolver_get.return_value = mock_response(
        json_data=mef_person_response_data
    )
    mock_listener_get.return_value = mock_response(
        json_data=mef_person_response_data
    )
    doc = Document.create(
        data=document_data_ref,
        delete_pid=False,
        dbcommit=True,
        reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return doc
Example #20
def _build_notification_email_context(loan, item, location):
    """Build the context used by the send_notification_to_location function.

    :param loan: the loan for which to build the context
    :param item: the item for which to build the context
    :param location: the item location
    """
    document_pid = Item.get_document_pid_by_item_pid(loan.item_pid)
    document = Document.get_record_by_pid(document_pid)
    pickup_location = Location.get_record_by_pid(
        loan.get('pickup_location_pid'))
    patron = Patron.get_record_by_pid(loan.patron_pid)

    # inherit holdings call number when possible
    issue_call_number = item.issue_inherited_first_call_number
    if issue_call_number:
        item['call_number'] = issue_call_number

    ctx = {
        'loan': loan.replace_refs().dumps(),
        'item': item.replace_refs().dumps(),
        'document': document.replace_refs().dumps(),
        'pickup_location': pickup_location,
        'item_location': location.dumps(),
        'patron': patron
    }
    library = pickup_location.get_library()
    ctx['pickup_location']['library'] = library
    ctx['item']['item_type'] = \
        ItemType.get_record_by_pid(item.item_type_circulation_category_pid)
    titles = [
        title for title in ctx['document'].get('title', [])
        if title['type'] == 'bf:Title'
    ]
    ctx['document']['title_text'] = \
        next(iter(titles or []), {}).get('_text')
    responsibility_statement = create_title_responsibilites(
        document.get('responsibilityStatement', []))
    ctx['document']['responsibility_statement'] = \
        next(iter(responsibility_statement or []), '')
    trans_date = ciso8601.parse_datetime(loan.get('transaction_date'))
    trans_date = trans_date\
        .replace(tzinfo=timezone.utc)\
        .astimezone(tz=library.get_timezone())
    ctx['loan']['transaction_date'] = \
        trans_date.strftime("%d.%m.%Y - %H:%M:%S")
    return ctx
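A sketch of how the context built above might be consumed. render_template is Flask's standard helper, but the wrapper function and the template path are purely hypothetical:

from flask import render_template


def _render_location_notification(loan, item, location):
    """Render the notification body (illustrative wrapper only)."""
    ctx = _build_notification_email_context(loan, item, location)
    # The template name below is a placeholder, not taken from the project.
    return render_template('email/location_notification.txt', **ctx)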
Example #21
def test_create_holdings_with_pattern(
        client, librarian_martigny_no_email, loc_public_martigny,
        journal, item_type_standard_martigny, document,
        json_header, holding_lib_martigny_data, pattern_yearly_one_level_data,
        holding_lib_martigny_w_patterns_data):
    """Test create holding type serial with patterns."""
    login_user_via_session(client, librarian_martigny_no_email.user)
    post_entrypoint = 'invenio_records_rest.hold_list'

    del holding_lib_martigny_data['pid']
    holding_lib_martigny_data['holdings_type'] = 'serial'
    res, _ = postdata(
        client,
        post_entrypoint,
        holding_lib_martigny_data
    )
    assert res.status_code == 403

    holding_lib_martigny_data['patterns'] = \
        pattern_yearly_one_level_data['patterns']

    # test will fail when creating a serial holding for a standard document.
    res, _ = postdata(
        client,
        post_entrypoint,
        holding_lib_martigny_data
    )
    assert res.status_code == 403

    # test will not fail when creating a standard holding for a journal doc.
    holding_lib_martigny_w_patterns_data['holdings_type'] = 'standard'
    # delete serials fields
    fields = [
        'enumerationAndChronology', 'notes', 'index', 'missing_issues',
        'supplementaryContent', 'patterns'
    ]
    for field in fields:
        del holding_lib_martigny_w_patterns_data[field]
    Holding.create(
        data=holding_lib_martigny_w_patterns_data,
        delete_pid=True,
        dbcommit=True,
        reindex=True)

    journal_pids = list(Document.get_all_serial_pids())
    assert journal_pids == [journal.pid]
Example #22
def doc_title_travailleurs(app):
    """Document with title with travailleur."""
    data = {
        '$schema': 'https://bib.rero.ch/schemas/documents/'
                   'document-v0.0.1.json',
        'pid': 'doc_title_test1',
        'type': [{
            "main_type": "docmaintype_book",
            "subtype": "docsubtype_other_book"
        }],
        'language': [{
            'type': 'bf:Language',
            'value': 'fre'
        }],
        'title': [{
            'type': 'bf:Title',
            'mainTitle': [{
                'value': 'Les travailleurs assidus sont de retours'
            }],
            'subtitle': [{
                'value': 'les jeunes arrivent bientôt ?'
            }]
        }],
        "provisionActivity": [{
            "type": "bf:Publication",
            "startDate": 1818
        }],
        'issuance': {
            'main_type': 'rdami:1001',
            'subtype': 'materialUnit'
        },
        'adminMetadata': {
            'encodingLevel': 'Minimal level'
        }
    }
    doc = Document.create(data=data,
                          delete_pid=False,
                          dbcommit=True,
                          reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return doc
Example #23
def test_sru_documents(client, document_ref, contribution_person_data):
    """Test sru documents rest api."""
    api_url = url_for('api_sru.documents',
                      version='1.1',
                      operation='searchRetrieve',
                      query='al-Wajīz')
    res = client.get(api_url)
    assert res.status_code == 200
    xml_dict = get_xml_dict(res)
    assert 'searchRetrieveResponse' in xml_dict
    search_rr = xml_dict['searchRetrieveResponse']
    assert search_rr.get('echoedSearchRetrieveRequest') == {
        'maximumRecords': '100',
        'query': 'al-Wajīz',
        'query_es': 'al-Wajīz',
        'recordPacking': 'XML',
        'recordSchema': 'info:sru/schema/1/marcxml-v1.1-light',
        'resultSetTTL': '0',
        'startRecord': '1'
    }
    assert search_rr.get('numberOfRecords') == str(Document.count())
Example #24
def doc_title_travailleurs(app):
    """Document with title with travailleur."""
    data = {
        '$schema': 'https://ils.rero.ch/schemas/documents/'
                   'document-v0.0.1.json',
        'pid': 'doc_title_test1', 'type': 'book',
        'language': [{'type': 'bf:Language', 'value': 'fre'}],
        'title': [{
            'type': 'bf:Title',
            'mainTitle': [{
                'value': 'Les travailleurs assidus sont de retours'
            }],
            'subtitle': [{'value': 'les jeunes arrivent bientôt ?'}]
        }]
    }
    doc = Document.create(
        data=data,
        delete_pid=False,
        dbcommit=True,
        reindex=True)
    flush_index(DocumentsSearch.Meta.index)
    return doc
Example #25
def test_monitoring(app, document_sion_items_data, script_info):
    """Test monitoring."""
    for index in OperationLog.get_indices():
        flush_index(index)
    cli_output = [
        'DB - ES    type      count                      index      count',
        '----------------------------------------------------------------',
        '      0    acac          0               acq_accounts          0',
        '      0    acin          0               acq_invoices          0',
        '      0    acol          0            acq_order_lines          0',
        '      0    acor          0                 acq_orders          0',
        '      0    acre          0               acq_receipts          0',
        '      0    acrl          0          acq_receipt_lines          0',
        '      0    budg          0                    budgets          0',
        '      0    cipo          0              circ_policies          0',
        '      0    coll          0                collections          0',
        '      0    cont          0              contributions          0',
        '      1     doc          1                  documents          0',
        '      0    hold          0                   holdings          0',
        '      0    illr          0               ill_requests          0',
        '      0    item          0                      items          0',
        '      0    itty          0                 item_types          0',
        '      0     lib          0                  libraries          0',
        '         loanid          0',
        '      0     loc          0                  locations          0',
        '      0    lofi          0               local_fields          0',
        '      0   notif          0              notifications          0',
        '      0    oplg          0             operation_logs          1',
        '      0     org          0              organisations          0',
        '      0    ptre          0  patron_transaction_events          0',
        '      0    ptrn          0                    patrons          0',
        '      0    pttr          0        patron_transactions          0',
        '      0    ptty          0               patron_types          0',
        '      0    stat          0                      stats          0',
        '      0    tmpl          0                  templates          0',
        '      0    vndr          0                    vendors          0'
    ]

    mon = Monitoring(time_delta=0)
    assert mon.get_es_count('xxx') == 'No >>xxx<< in ES'
    assert mon.get_db_count('xxx') == 'No >>xxx<< in DB'
    doc = Document.create(
        data=document_sion_items_data,
        delete_pid=False,
        dbcommit=True,
        reindex=False
    )
    doc_pid = doc.pid
    assert mon.get_db_count('doc') == 1
    assert mon.get_es_count('documents') == 0
    assert mon.check() == {'doc': {'db_es': 1}}
    assert mon.missing('doc') == {'DB': [], 'ES': ['doc3'], 'ES duplicate': []}
    # not flushed by default
    flush_index('operation_logs')
    assert mon.info() == {
        'acac': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_accounts'},
        'acin': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_invoices'},
        'acol': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_order_lines'},
        'acor': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_orders'},
        'acre': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_receipts'},
        'acrl': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_receipt_lines'},
        'budg': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'budgets'},
        'cipo': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'circ_policies'},
        'coll': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'collections'},
        'cont': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'contributions'},
        'doc': {'db': 1, 'db-es': 1, 'es': 0, 'index': 'documents'},
        'hold': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'holdings'},
        'illr': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'ill_requests'},
        'item': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'items'},
        'itty': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'item_types'},
        'lib': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'libraries'},
        'loanid': {'db': 0},
        'loc': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'locations'},
        'lofi': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'local_fields'},
        'notif': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'notifications'},
        'oplg': {'db': 0, 'db-es': 0, 'es': 1, 'index': 'operation_logs'},
        'org': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'organisations'},
        'ptre': {'db': 0, 'db-es': 0, 'es': 0,
                 'index': 'patron_transaction_events'},
        'ptrn': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'patrons'},
        'pttr': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'patron_transactions'},
        'ptty': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'patron_types'},
        'stat': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'stats'},
        'tmpl': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'templates'},
        'vndr': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'vendors'}
    }
    assert mon.__str__().split('\n') == cli_output + ['']

    runner = CliRunner()
    res = runner.invoke(es_db_missing_cli, ['doc', '-d', 0], obj=script_info)
    assert res.output == f'ES missing doc: {doc.pid}\n'

    runner = CliRunner()
    res = runner.invoke(es_db_counts_cli, ['-m', '-d', 0], obj=script_info)
    assert res.output.split('\n') == cli_output + [
        f'ES missing doc: {doc.pid}',
        ''
    ]

    # we have to get the doc again because we lost the session after the use
    # of the CliRunner
    doc = Document.get_record_by_pid(doc_pid)
    doc.reindex()
    flush_index(DocumentsSearch.Meta.index)
    assert mon.get_es_count('documents') == 1
    assert mon.check() == {}
    assert mon.missing('doc') == {'DB': [], 'ES': [], 'ES duplicate': []}
    doc.delete(dbcommit=True)
    for index in OperationLog.get_indices():
        flush_index(index)
    assert mon.get_db_count('doc') == 0
    assert mon.get_es_count('documents') == 1
    assert mon.check() == {'doc': {'db_es': -1}}
    assert mon.missing('doc') == {'DB': ['doc3'], 'ES': [], 'ES duplicate': []}
Example #26
def test_documents_import_bnf_ean(client):
    """Test document import from bnf."""
    res = client.get(url_for('api_imports.imports_search', q='ean:any:123'))
    assert res.status_code == 200
    data = get_json(res)
    assert not data.get('metadata')

    res = client.get(
        url_for('api_imports.imports_search', q='ean:any:9782070541270'))
    assert res.status_code == 200
    data = get_json(res).get('hits').get('hits')[0].get('metadata')
    assert data['pid'] == 'FRBNF370903960000006'
    assert Document.create(data)

    res = client.get(
        url_for('api_imports.imports_search', q='ean:any:9782072862014'))
    assert res.status_code == 200
    res_j = get_json(res)
    data = res_j.get('hits').get('hits')[0].get('metadata')
    data.update({
        "$schema":
        "https://ils.rero.ch/schemas/documents/document-v0.0.1.json"
    })
    assert Document.create(data)
    marc21_link = res_j.get('hits').get('hits')[0].get('links').get('marc21')

    res = client.get(marc21_link)
    data = get_json(res)
    assert data[0][0] == 'leader'

    res = client.get(url_for('api_imports.imports_search', q=''))
    assert res.status_code == 200
    assert get_json(res) == {
        'aggregations': {},
        'hits': {
            'hits': [],
            'remote_total': 0,
            'total': 0
        }
    }

    res = client.get(url_for('api_imports.imports_search', q='peter'))
    assert res.status_code == 200
    unfiltered_total = get_json(res)['hits']['remote_total']
    assert get_json(res)

    res = client.get(
        url_for('api_imports.imports_search',
                q='peter',
                year=2000,
                format='rerojson'))
    assert res.status_code == 200
    assert get_json(res)['hits']['remote_total'] < unfiltered_total

    res = client.get(
        url_for('api_imports.imports_search',
                q='peter',
                author='Peter Owen',
                format='rerojson'))
    assert res.status_code == 200
    assert get_json(res)['hits']['remote_total'] < unfiltered_total

    res = client.get(
        url_for('api_imports.imports_search',
                q='peter',
                type='book',
                format='rerojson'))
    assert res.status_code == 200
    assert get_json(res)['hits']['remote_total'] < unfiltered_total

    res = client.get(
        url_for('api_imports.imports_record', id='FRBNF370903960000006'))
    assert res.status_code == 200
    assert get_json(res).get('metadata', {}).get('identifiedBy')

    res = client.get(
        url_for('api_imports.imports_record',
                id='FRBNF370903960000006',
                format='rerojson'))
    assert res.status_code == 200
    assert get_json(res).get('metadata', {}).get('ui_title_text')

    res = client.get(
        url_for('api_imports.imports_record',
                id='FRBNF370903960000006',
                format='marc'))
    assert res.status_code == 200
    assert get_json(res)[1][1] == 'FRBNF370903960000006'
Example #27
def test_document_can_delete(app, document_data_tmp):
    """Test can delete."""
    document = Document.create(document_data_tmp, delete_pid=True)
    assert document.get_links_to_me() == {}
    assert document.can_delete
Example #28
def test_document_can_delete_harvested(app, ebook_1_data):
    """Test can delete for harvested records."""
    document = Document.create(ebook_1_data, delete_pid=True)
    assert document.harvested
    assert not document.can_delete
Example #29
def test_contribution_format(db, document_data):
    """Test contribution format."""
    result = 'Nebehay, Christian Michael'
    doc = Document.create(document_data, delete_pid=True)
    assert contribution_format(doc.pid, 'en', 'global').startswith(result)
Example #30
def test_publish_harvested_records(app, ebooks_1_xml, ebooks_2_xml,
                                   org_martigny, loc_online_martigny,
                                   item_type_online_martigny, org_sion,
                                   loc_online_sion, item_type_online_sion,
                                   capsys):
    """Test publish harvested records."""
    Identifier = namedtuple('Identifier', 'identifier')
    Record = namedtuple('Record', 'xml deleted header')
    records = []
    records.append(
        Record(xml=ebooks_1_xml,
               deleted=False,
               header=Identifier(identifier='record1')))
    records.append(
        Record(xml=ebooks_2_xml,
               deleted=False,
               header=Identifier(identifier='record2')))
    records.append(
        Record(xml=ebooks_2_xml,
               deleted=True,
               header=Identifier(identifier='record3')))

    kwargs = {'max': 100}
    publish_harvested_records(sender=None, records=records, kwargs=kwargs)
    flush_index(DocumentsSearch.Meta.index)
    flush_index(HoldingsSearch.Meta.index)

    assert Document.count() == 2
    doc1 = Document.get_record_by_pid('1')
    assert doc1.get('$schema') is not None
    assert doc1.get('identifiedBy') == [{
        'type': 'bf:Isbn',
        'value': '9782075118842'
    }, {
        'type': 'bf:Local',
        'value': 'cantook-EDEN502344'
    }, {
        'type': 'bf:Local',
        'source': 'cantook',
        'value': 'record1'
    }]
    assert len(list(Holding.get_holdings_pid_by_document_pid(doc1.pid))) == 1
    doc2 = Document.get_record_by_pid('2')
    assert doc2.get('$schema') is not None
    assert doc2.get('identifiedBy') == [{
        'type': 'bf:Isbn',
        'value': '9782811234157'
    }, {
        'type': 'bf:Local',
        'value': 'cantook-immateriel.frO1006810'
    }, {
        'type': 'bf:Local',
        'source': 'cantook',
        'value': 'record2'
    }]
    assert len(list(Holding.get_holdings_pid_by_document_pid(doc2.pid))) == 1

    # test update
    publish_harvested_records(sender=None, records=records)
    flush_index(DocumentsSearch.Meta.index)
    flush_index(HoldingsSearch.Meta.index)
    assert len(list(Holding.get_holdings_pid_by_document_pid(doc1.pid))) == 1
    assert len(list(Holding.get_holdings_pid_by_document_pid(doc2.pid))) == 1

    # test delete
    records = []
    del doc1['electronicLocator']
    records.append(doc1)
    doc2['electronicLocator'] = [{
        "content":
        "coverImage",
        "type":
        "relatedResource",
        "url":
        "http://images.immateriel.fr/covers/DEQ2C5A.png"
    }]
    records.append(doc2)

    create_records(records=records)
    flush_index(DocumentsSearch.Meta.index)
    flush_index(HoldingsSearch.Meta.index)
    assert len(list(Holding.get_holdings_pid_by_document_pid(doc1.pid))) == 0
    assert len(list(Holding.get_holdings_pid_by_document_pid(doc2.pid))) == 0

    assert 2 == delete_records(records=records)