def test_operation_log_es_mapping(item_lib_sion, operation_log_1_data):
    """Test operation log elasticsearch mapping."""
    search = OperationLogsSearch()
    mapping = get_mapping(search.Meta.index)
    assert mapping
    OperationLog.create(
        operation_log_1_data,
        dbcommit=True,
        reindex=True,
        delete_pid=True)
    # Indexing a record must leave the index mapping untouched.
    assert get_mapping(search.Meta.index) == mapping

    assert search.query(
        'query_string', query=OperationLogOperation.CREATE).count() == 3
    assert search.query(
        'query_string', query=OperationLogOperation.UPDATE).count() == 4
    assert search.query('match', **{'user_name': 'updated_user'}).count() == 1
# Ejemplo n.º 2
def create_operation_logs(infile, lazy, size):
    """Load operation log records in reroils.

    :param infile: Json operation log file.
    :param lazy: lazy reads file.
    :param size: number of records sent to Elasticsearch per bulk request.
    """
    click.secho('Load operation log records:', fg='green')
    if lazy:
        # try to lazy read json file (slower, better memory management)
        data = read_json_record(infile)
    else:
        # load everything in memory (faster, bad memory management)
        data = json.load(infile)
    index_count = 0
    with click.progressbar(data) as bar:
        records = []
        for oplg in bar:
            records.append(oplg)
            index_count += 1
            # flush a complete batch to Elasticsearch
            if not index_count % size:
                OperationLog.bulk_index(records)
                records = []
        # index the remaining partial batch; these records were already
        # counted in the loop, so do NOT add len(records) again (the
        # original code double-counted the tail here)
        if records:
            OperationLog.bulk_index(records)
    click.echo(f'created {index_count} operation logs.')
# Ejemplo n.º 3
def migrate_virtua_operation_logs(infile, verbose, debug, lazy):
    """Migrate Virtua operation log records in reroils.

    :param infile: Json operation log file.
    :param verbose: verbose print.
    :param debug: print errors for records that could not be migrated.
    :param lazy: lazy reads file.
    """
    enabled_logs = current_app.config.get('RERO_ILS_ENABLE_OPERATION_LOG')
    click.secho('Migrate Virtua operation log records:', fg='green')
    if lazy:
        # try to lazy read json file (slower, better memory management)
        data = read_json_record(infile)
    else:
        # load everything in memory (faster, bad memory management)
        data = json.load(infile)
    index_count = 0
    with click.progressbar(data) as bar:
        for oplg in bar:
            # count while iterating: ``len(data)`` is unavailable when
            # ``data`` is a lazy generator (the original code crashed there)
            index_count += 1
            try:
                operation = oplg.get('operation')
                resource = extracted_data_from_ref(
                    oplg.get('record').get('$ref'), data='resource')
                pid_type = enabled_logs.get(resource)
                if pid_type and operation == OperationLogOperation.CREATE:
                    # The virtua create operation log overrides the reroils
                    # create operation log, the method to use is UPDATE
                    record_pid = extracted_data_from_ref(
                        oplg.get('record').get('$ref'), data='pid')

                    create_rec = \
                        OperationLog.get_create_operation_log_by_resource_pid(
                            pid_type, record_pid)
                    if create_rec:
                        create_rec.update(oplg, dbcommit=True, reindex=True)
                elif pid_type and operation == OperationLogOperation.UPDATE:
                    # The virtua update operation log is a new entry in the
                    # reroils operation log, the method to use is CREATE
                    OperationLog.create(data=oplg, dbcommit=True, reindex=True)
            except Exception as err:
                # best-effort migration: skip records we can not process,
                # but surface the problem when running with --debug
                if debug:
                    click.secho(f'Error: {err}', fg='red')
    click.echo(f'created {index_count} operation logs.')
# Ejemplo n.º 4
def test_operation_logs_es_mapping(db, item_lib_sion, operation_log_1_data):
    """Test operation logs elasticsearch mapping."""
    search = OperationLogsSearch()
    mapping = get_mapping(search.Meta.index)
    assert mapping
    oplg = OperationLog.create(operation_log_1_data, dbcommit=True,
                               reindex=True, delete_pid=True)
    flush_index(OperationLogsSearch.Meta.index)
    # indexing the record must leave the mapping untouched
    assert get_mapping(search.Meta.index) == mapping

    assert oplg == operation_log_1_data
    assert oplg.get('pid') == '7'

    record = OperationLog.get_record_by_pid('7')
    assert record == operation_log_1_data

    fetched = fetcher(record.id, record)
    assert fetched.pid_value == '7'
    assert fetched.pid_type == 'oplg'

    assert record.get('operation') == OperationLogOperation.UPDATE
# Ejemplo n.º 5
def test_operation_bulk_index(client, es_clear, operation_log_data):
    """Test operation logs bulk creation."""
    data = []
    for date in [
        '2020-01-21T09:51:52.879533+00:00',
        '2020-02-21T09:51:52.879533+00:00',
        '2020-03-21T09:51:52.879533+00:00',
        '2020-04-21T09:51:52.879533+00:00',
        '2021-01-21T09:51:52.879533+00:00',
        '2021-02-21T09:51:52.879533+00:00'
    ]:
        tmp = deepcopy(operation_log_data)
        tmp['date'] = date
        data.append(tmp)
    OperationLog.bulk_index(data)
    # flush the index for the test
    current_search.flush_and_refresh(OperationLog.index_name)
    assert OperationLog.get_indices() == set((
        'operation_logs-2020',
        'operation_logs-2021'
    ))
    with pytest.raises(Exception) as excinfo:
        data[0]['operation'] = dict(name='foo')
        OperationLog.bulk_index(data)
    # the assertion must be OUTSIDE the ``with`` block: statements placed
    # after the raising call inside ``pytest.raises`` never execute
    assert "BulkIndexError" in str(excinfo.value)
    # clean up the index
    assert OperationLog.delete_indices()
# Ejemplo n.º 6
def test_operation_create(client, es_clear, operation_log_data):
    """Test operation logs creation."""
    oplg = OperationLog.create(operation_log_data, index_refresh='wait_for')
    assert oplg
    assert oplg.id
    # need to compare with dumps as it has resolve $refs
    data = OperationLog.get_record(oplg.id)
    for key in ('_created', '_updated'):
        del data[key]
    assert data == OperationLog(operation_log_data).dumps()
    other = deepcopy(operation_log_data)
    other['date'] = '2020-01-21T09:51:52.879533+00:00'
    oplg2 = OperationLog.create(other, index_refresh='wait_for')
    current_year = datetime.now().year
    assert OperationLog.get_indices() == {
        'operation_logs-2020',
        f'operation_logs-{current_year}'
    }
    assert OperationLog.get_record(oplg.id)
    assert OperationLog.get_record(oplg2.id)
    # clean up the index
    assert OperationLog.delete_indices()
# Ejemplo n.º 7
def test_operation_log_on_ill_request(client, ill_request_martigny,
                                      librarian_martigny):
    """Test operation log on ILL request."""
    # The ``ill_request_martigny`` fixture created an operation log for the
    # 'create' operation. Check that this OpLog carries the special
    # additional ILL request informations.
    login_user_via_session(client, librarian_martigny.user)

    oplg_index = OperationLog.get_index({'date': datetime.now().isoformat()})
    flush_index(oplg_index)

    query = f'record.type:illr AND record.value:{ill_request_martigny.pid}'
    url = url_for('invenio_records_rest.oplg_list', q=query, sort='mostrecent')
    data = get_json(client.get(url))
    assert data['hits']['total']['value'] == 1
    metadata = data['hits']['hits'][0]['metadata']
    assert metadata['operation'] == OperationLogOperation.CREATE
    assert 'ill_request' in metadata
    assert 'status' in metadata['ill_request']
# Ejemplo n.º 8
def test_update(app, es_clear, operation_log_data, monkeypatch):
    """Test update log."""
    operation_log = OperationLog.create(deepcopy(operation_log_data),
                                        index_refresh='wait_for')

    log_data = OperationLog.get_record(operation_log.id)
    assert log_data['record']['value'] == 'item4'

    # Update OK
    log_data['record']['value'] = '1234'
    OperationLog.update(log_data.id, log_data['date'], log_data)
    log_data = OperationLog.get_record(operation_log.id)
    assert log_data['record']['value'] == '1234'

    # Update KO
    monkeypatch.setattr(
        'elasticsearch_dsl.Document.update', lambda *args, **kwargs: 'error')
    with pytest.raises(Exception) as exception:
        OperationLog.update(log_data.id, log_data['date'], log_data)
    # the assertion must be OUTSIDE the ``with`` block (code after the
    # raising call never runs), and the message lives on ``.value``, not on
    # the ExceptionInfo object itself
    assert str(exception.value) == 'Operation log cannot be updated.'
# Ejemplo n.º 9
def test_operation_log_jsonresolver(item_lib_martigny):
    """Test operation logs json resolver."""
    oplg = OperationLog.get_record_by_pid('1')
    record = Record.create({
        'operation_log': {'$ref': 'https://ils.rero.ch/api/operation_logs/1'}
    })
    resolved = record.replace_refs().get('operation_log')
    assert resolved == {'pid': '1', 'type': 'oplg'}

    # deleted record
    oplg.delete()
    with pytest.raises(JsonRefError):
        record.replace_refs().dumps()

    # non existing record
    record = Record.create({
        'operation_logs': {
            '$ref': 'https://ils.rero.ch/api/operation_logs/n_e'
        }
    })
    with pytest.raises(JsonRefError):
        record.replace_refs().dumps()
# Ejemplo n.º 10
def test_monitoring(app, document_sion_items_data, script_info):
    """Test monitoring.

    Creates a document in the DB without reindexing it, then checks that the
    DB/ES monitoring utilities (counts, ``check``, ``missing``, ``info`` and
    the related CLI commands) report the discrepancy, and that the reports
    clear once the document is indexed.
    """
    # make sure all operation log indices are searchable before counting
    for index in OperationLog.get_indices():
        flush_index(index)
    # expected CLI table: DB/ES delta, pid type, DB count, ES index name
    # and ES count for every monitored resource type
    cli_output = [
        'DB - ES    type      count                      index      count',
        '----------------------------------------------------------------',
        '      0    acac          0               acq_accounts          0',
        '      0    acin          0               acq_invoices          0',
        '      0    acol          0            acq_order_lines          0',
        '      0    acor          0                 acq_orders          0',
        '      0    acre          0               acq_receipts          0',
        '      0    acrl          0          acq_receipt_lines          0',
        '      0    budg          0                    budgets          0',
        '      0    cipo          0              circ_policies          0',
        '      0    coll          0                collections          0',
        '      0    cont          0              contributions          0',
        '      1     doc          1                  documents          0',
        '      0    hold          0                   holdings          0',
        '      0    illr          0               ill_requests          0',
        '      0    item          0                      items          0',
        '      0    itty          0                 item_types          0',
        '      0     lib          0                  libraries          0',
        '         loanid          0',
        '      0     loc          0                  locations          0',
        '      0    lofi          0               local_fields          0',
        '      0   notif          0              notifications          0',
        '      0    oplg          0             operation_logs          1',
        '      0     org          0              organisations          0',
        '      0    ptre          0  patron_transaction_events          0',
        '      0    ptrn          0                    patrons          0',
        '      0    pttr          0        patron_transactions          0',
        '      0    ptty          0               patron_types          0',
        '      0    stat          0                      stats          0',
        '      0    tmpl          0                  templates          0',
        '      0    vndr          0                    vendors          0'
    ]

    # time_delta=0: do not ignore recently modified records when comparing
    mon = Monitoring(time_delta=0)
    assert mon.get_es_count('xxx') == 'No >>xxx<< in ES'
    assert mon.get_db_count('xxx') == 'No >>xxx<< in DB'
    # create the document in DB only (reindex=False) to provoke a DB/ES gap
    doc = Document.create(
        data=document_sion_items_data,
        delete_pid=False,
        dbcommit=True,
        reindex=False
    )
    doc_pid = doc.pid
    assert mon.get_db_count('doc') == 1
    assert mon.get_es_count('documents') == 0
    # one document present in DB but missing from ES
    assert mon.check() == {'doc': {'db_es': 1}}
    assert mon.missing('doc') == {'DB': [], 'ES': ['doc3'], 'ES duplicate': []}
    # not flushed by default
    flush_index('operation_logs')
    assert mon.info() == {
        'acac': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_accounts'},
        'acin': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_invoices'},
        'acol': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_order_lines'},
        'acor': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_orders'},
        'acre': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_receipts'},
        'acrl': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_receipt_lines'},
        'budg': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'budgets'},
        'cipo': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'circ_policies'},
        'coll': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'collections'},
        'cont': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'contributions'},
        'doc': {'db': 1, 'db-es': 1, 'es': 0, 'index': 'documents'},
        'hold': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'holdings'},
        'illr': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'ill_requests'},
        'item': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'items'},
        'itty': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'item_types'},
        'lib': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'libraries'},
        'loanid': {'db': 0},
        'loc': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'locations'},
        'lofi': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'local_fields'},
        'notif': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'notifications'},
        'oplg': {'db': 0, 'db-es': 0, 'es': 1, 'index': 'operation_logs'},
        'org': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'organisations'},
        'ptre': {'db': 0, 'db-es': 0, 'es': 0,
                 'index': 'patron_transaction_events'},
        'ptrn': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'patrons'},
        'pttr': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'patron_transactions'},
        'ptty': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'patron_types'},
        'stat': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'stats'},
        'tmpl': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'templates'},
        'vndr': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'vendors'}
    }
    # the string rendering must match the CLI table (plus trailing newline)
    assert mon.__str__().split('\n') == cli_output + ['']

    # CLI: list the documents missing from ES
    runner = CliRunner()
    res = runner.invoke(es_db_missing_cli, ['doc', '-d', 0], obj=script_info)
    assert res.output == f'ES missing doc: {doc.pid}\n'

    # CLI: full counts table with the missing-pid detail appended
    runner = CliRunner()
    res = runner.invoke(es_db_counts_cli, ['-m', '-d', 0], obj=script_info)
    assert res.output.split('\n') == cli_output + [
        f'ES missing doc: {doc.pid}',
        ''
    ]

    # we have to get the doc again because we lost the session after the use
    # of the CliRunner
    doc = Document.get_record_by_pid(doc_pid)
    doc.reindex()
    flush_index(DocumentsSearch.Meta.index)
    # once indexed, DB and ES agree again
    assert mon.get_es_count('documents') == 1
    assert mon.check() == {}
    assert mon.missing('doc') == {'DB': [], 'ES': [], 'ES duplicate': []}
    # deleting from DB only (no delindex) flips the discrepancy direction
    doc.delete(dbcommit=True)
    for index in OperationLog.get_indices():
        flush_index(index)
    assert mon.get_db_count('doc') == 0
    assert mon.get_es_count('documents') == 1
    assert mon.check() == {'doc': {'db_es': -1}}
    assert mon.missing('doc') == {'DB': ['doc3'], 'ES': [], 'ES duplicate': []}
# Ejemplo n.º 11
def test_operation_log_on_item(client, item_lib_martigny_data_tmp,
                               librarian_martigny, json_header,
                               item_lib_martigny):
    """Test operation log on Item."""
    login_user_via_session(client, librarian_martigny.user)

    # Get the operation log index
    oplg_index = OperationLog.get_index({'date': datetime.now().isoformat()})

    def _oplg_hits():
        """Flush the OpLog index and return the ES hits for the item."""
        flush_index(oplg_index)
        return get_json(client.get(es_url))['hits']

    # STEP #1 : Create an item. This will generate an operation log
    item_data = deepcopy(item_lib_martigny_data_tmp)
    del item_data['pid']
    item = Item.create(item_data, dbcommit=True, reindex=True)
    es_url = url_for(
        'invenio_records_rest.oplg_list',
        q=f'record.type:item AND record.value:{item.pid}',
        sort='mostrecent')
    hits = _oplg_hits()
    assert hits['total']['value'] == 1
    assert hits['hits'][0]['metadata']['operation'] == \
        OperationLogOperation.CREATE

    # STEP #2 : Update the item ``price`` attribute.
    #   As any changes on this attribute must be logged, a new operation log
    #   will be generated.
    item['price'] = 10
    item = item.update(item, dbcommit=True, reindex=True)
    hits = _oplg_hits()
    assert hits['total']['value'] == 2
    assert hits['hits'][0]['metadata']['operation'] == \
        OperationLogOperation.UPDATE

    # STEP #3 : Update the item ``status`` attribute.
    #   This attribute doesn't need to be tracked. So if it's the only change
    #   on this record then no OpLog should be created.
    item['status'] = ItemStatus.EXCLUDED
    item = item.update(item, dbcommit=True, reindex=True)
    hits = _oplg_hits()
    assert hits['total']['value'] == 2

    # STEP #4 : Update the item ``status`` and ``price`` attributes.
    #   As we update at least one attribute that need to be tracked, this
    #   update will generate a new OpLog (UPDATE)
    item['status'] = ItemStatus.AT_DESK
    item['price'] = 12
    item = item.update(item, dbcommit=True, reindex=True)
    hits = _oplg_hits()
    assert hits['total']['value'] == 3
    assert hits['hits'][0]['metadata']['operation'] == \
        OperationLogOperation.UPDATE

    # STEP #5 : Delete the item
    #   This will generate the last OpLog about the item.
    item.delete(dbcommit=True, delindex=True)
    hits = _oplg_hits()
    assert hits['total']['value'] == 4
    assert hits['hits'][0]['metadata']['operation'] == \
        OperationLogOperation.DELETE
# Ejemplo n.º 12
def test_operation_log_es_mapping(item_lib_sion, operation_log_data):
    """Test operation log elasticsearch mapping."""
    mapping_before = get_mapping(OperationLog.index_name)
    assert mapping_before
    OperationLog.create(operation_log_data)
    # creating a record must not alter the index mapping
    assert get_mapping(OperationLog.index_name) == mapping_before
# Ejemplo n.º 13
def operation_log(operation_log_data, item_lib_sion):
    """Load operation log record."""
    record = OperationLog.create(operation_log_data, index_refresh=True)
    return record
# Ejemplo n.º 14
def destroy_operation_logs():
    """Removes all the operation logs data."""
    OperationLog.delete_indices()
    message = 'All operations logs have been removed'
    click.secho(message, fg='green')