def test_holding_create(db, es, document, org_martigny, loc_public_martigny,
                        item_type_standard_martigny,
                        holding_lib_martigny_data):
    """Test holding creation."""
    expected_pid = Holding.provider.identifier.next() + 1
    record = Holding.create(
        holding_lib_martigny_data, dbcommit=True, reindex=True,
        delete_pid=True)
    # the created record carries the next pid from the provider sequence
    assert record == holding_lib_martigny_data
    assert record.get('pid') == str(expected_pid)

    # the record is retrievable from the database by its pid
    record = Holding.get_record_by_pid(str(expected_pid))
    assert record == holding_lib_martigny_data

    # the fetcher resolves the persistent identifier of the record
    fetched_pid = fetcher(record.id, record)
    assert fetched_pid.pid_value == str(expected_pid)
    assert fetched_pid.pid_type == 'hold'

    # the indexed document is searchable and linked to the organisation
    hit = next(
        HoldingsSearch()
        .filter('term', pid=record.pid)
        .source('pid')
        .scan()
    )
    indexed_record = Holding.get_record_by_pid(hit.pid)
    assert indexed_record.organisation_pid == org_martigny.get('pid')

    # an unknown pid yields no holdings type
    assert not Holding.get_holdings_type_by_holding_pid('toto')

    # clean created data
    record.delete(force=True, dbcommit=True, delindex=True)
def get_links_to_me(self, get_pids=False):
    """Record links.

    :param get_pids: if True list of linked pids
                     if False count of linked records
    """
    from rero_ils.modules.acq_orders.api import AcqOrdersSearch
    from rero_ils.modules.holdings.api import HoldingsSearch

    # one ES query per linked resource type, all filtered on this vendor
    queries = {
        'acq_orders': AcqOrdersSearch()
        .filter('term', vendor__pid=self.pid),
        'acq_invoices': AcquisitionInvoicesSearch()
        .filter('term', vendor__pid=self.pid),
        'holdings': HoldingsSearch()
        .filter('term', vendor__pid=self.pid),
    }
    links = {}
    for resource, query in queries.items():
        # either the sorted pid list or the bare hit count
        value = sorted_pids(query) if get_pids else query.count()
        if value:
            links[resource] = value
    return links
def upgrade_downgrade(action):
    """Upgrade or downgrade index holdings.

    Correct items_count and public_items_count for holdings of type serial.

    :param str action: upgrade or downgrade.
    """
    index = HoldingsSearch.Meta.index
    serial_query = HoldingsSearch() \
        .filter('term', holdings_type='serial') \
        .source(['pid'])
    # collect (ES document id, holding pid) pairs up front so the scan
    # cursor is not held open while we update documents
    pairs = [(hit.meta.id, hit.pid) for hit in serial_query.scan()]
    LOGGER.info(f'Indexing {len(pairs)} records ....')
    count = 0
    for es_id, pid in pairs:
        document = Document.get(
            es_id, index=index, using=current_search_client)
        items_count, public_items_count = get_counts(pid, action)
        # partial update of the two counters directly in the index
        document.update(
            items_count=items_count,
            public_items_count=public_items_count,
            index=index,
            using=current_search_client,
            refresh=True
        )
        count += 1
    LOGGER.info(f'{count} records indexed.')
def test_holding_es_mapping(es, db, holding_lib_martigny,
                            holding_lib_martigny_data):
    """Test holding elasticsearch mapping."""
    index = HoldingsSearch().Meta.index
    initial_mapping = get_mapping(index)
    assert initial_mapping
    Holding.create(
        holding_lib_martigny_data, dbcommit=True, reindex=True,
        delete_pid=True)
    # indexing a record must not alter the ES mapping
    assert initial_mapping == get_mapping(index)
def upgrade_downgrade(action):
    """Upgrade or downgrade index holdings.

    Correct items_count and public_items_count for holdings of type serial
    by reindexing every serial holding.

    :param str action: upgrade or downgrade (currently not read by the
        body — the same reindex runs either way).
    :returns: number of holdings that failed to reindex.
    """
    query = HoldingsSearch() \
        .filter('term', holdings_type='serial') \
        .source(['pid'])
    ids = [(hit.meta.id, hit.pid) for hit in query.scan()]
    LOGGER.info(f'Indexing {len(ids)} records ....')
    errors = 0
    # `record_id` instead of `id`: do not shadow the builtin
    for idx, (record_id, pid) in enumerate(ids):
        LOGGER.info(f'{idx} * Reindex holding: {pid}.')
        try:
            hold = Holding.get_record_by_id(record_id)
            hold.reindex()
        except Exception as err:
            # best-effort: log the failure and keep processing the rest
            LOGGER.error(f'{idx} * Reindex holding: {pid} {err}')
            errors += 1
    return errors
def test_holding_create(db, es_clear, document, org_martigny,
                        loc_public_martigny, item_type_standard_martigny,
                        holding_lib_martigny_data):
    """Test holding creation."""
    record = Holding.create(
        holding_lib_martigny_data, dbcommit=True, reindex=True,
        delete_pid=True)
    flush_index(HoldingsSearch.Meta.index)
    # first record in a cleared index gets pid '1'
    assert record == holding_lib_martigny_data
    assert record.get('pid') == '1'

    record = Holding.get_record_by_pid('1')
    assert record == holding_lib_martigny_data

    # the fetcher resolves the persistent identifier of the record
    fetched_pid = fetcher(record.id, record)
    assert fetched_pid.pid_value == '1'
    assert fetched_pid.pid_type == 'hold'

    # the indexed document is linked to the owning organisation
    hit = next(HoldingsSearch().filter('term', pid=record.pid).scan())
    indexed_record = Holding.get_record_by_pid(hit.pid)
    assert indexed_record.organisation_pid == org_martigny.get('pid')
def test_holding_es_mapping(es, db, loc_public_martigny,
                            item_type_standard_martigny, document,
                            holding_lib_martigny_data):
    """Test holding elasticsearch mapping."""
    index = HoldingsSearch().Meta.index
    initial_mapping = get_mapping(index)
    assert initial_mapping
    record = Holding.create(
        holding_lib_martigny_data, dbcommit=True, reindex=True,
        delete_pid=True)
    # indexing a record must not alter the ES mapping
    assert initial_mapping == get_mapping(index)
    # clean created data
    record.delete(force=True, dbcommit=True, delindex=True)
def test_holding_organisation_pid(org_martigny, holding_lib_martigny):
    """Test organisation pid has been added during the indexing."""
    hit = next(
        HoldingsSearch()
        .filter('term', pid=holding_lib_martigny.pid)
        .scan()
    )
    record = Holding.get_record_by_pid(hit.pid)
    # the indexer enriched the record with its organisation pid
    assert record.organisation_pid == org_martigny.pid
def get_holdings_items(document_pid, organisation_pids=None,
                       library_pids=None, location_pids=None):
    """Create Holding and Item informations.

    :param document_pid: document pid to use for holdings search
    :param organisation_pids: Which organisations items to add.
    :param library_pids: Which from libraries items to add.
    :param location_pids: Which from locations items to add.
    :returns: list of holding informations with associated organisation,
              library and location pid, name informations.
    """
    def get_name(resource, pid):
        """Get name from resource.

        :param resource: Resource class to use.
        :param pid: Pid for the resource to get the name from.
        :returns: name from the resource, or None if it does not exist.
        """
        data = resource.get_record_by_pid(pid)
        if data:
            return data.get('name')

    results = []
    if document_pid:
        holding_pids = Holding.get_holdings_pid_by_document_pid(
            document_pid=document_pid, with_masked=False)
        holding_pids = list(holding_pids)
        # per-call caches so each organisation/library/location name is
        # fetched at most once
        organisations = {}
        libraries = {}
        locations = {}
        query = HoldingsSearch().filter('terms', pid=holding_pids)
        if organisation_pids:
            query = query.filter(
                {'terms': {'organisation.pid': organisation_pids}})
        if library_pids:
            query = query.filter({'terms': {'library.pid': library_pids}})
        if location_pids:
            query = query.filter({'terms': {'location.pid': location_pids}})
        for hit in query.scan():
            holding = hit.to_dict()
            organisation_pid = hit.organisation.pid
            if organisation_pid not in organisations:
                organisations[organisation_pid] = get_name(
                    Organisation, organisation_pid)
            library_pid = hit.library.pid
            if library_pid not in libraries:
                libraries[library_pid] = get_name(Library, library_pid)
            location_pid = hit.location.pid
            if location_pid not in locations:
                locations[location_pid] = get_name(Location, location_pid)
            result = {
                'organisation': {
                    'pid': organisation_pid,
                    'name': organisations[organisation_pid]
                },
                'library': {
                    'pid': library_pid,
                    'name': libraries[library_pid]
                },
                'location': {
                    'pid': location_pid,
                    'name': locations[location_pid]
                },
                'holdings': {
                    'call_number': holding.get('call_number'),
                    'second_call_number': holding.get('second_call_number'),
                    'enumerationAndChronology':
                        holding.get('enumerationAndChronology'),
                    'electronic_location':
                        holding.get('electronic_location', []),
                    'notes': holding.get('notes', []),
                    'supplementaryContent':
                        holding.get('supplementaryContent'),
                    'index': holding.get('index'),
                    'missing_issues': holding.get('missing_issues'),
                }
            }
            if hit.holdings_type == 'standard':
                item_pids = Item.get_items_pid_by_holding_pid(
                    hit.pid, with_masked=False)
                item_hits = ItemsSearch() \
                    .filter('terms', pid=list(item_pids)) \
                    .scan()
                for item_hit in item_hits:
                    item_data = item_hit.to_dict()
                    # BUG FIX: copy the holding dict for every item. The
                    # original did `item_result = result`, aliasing one
                    # shared dict, so all appended entries ended up with
                    # the LAST item's data.
                    item_result = dict(result)
                    item_result['item'] = {
                        'barcode': item_data.get('barcode'),
                        # NOTE(review): 'all_number' looks like a typo for
                        # 'call_number' — confirm against the items mapping
                        # before changing the key (callers may rely on it).
                        'all_number': item_data.get('all_number'),
                        'second_call_number':
                            item_data.get('second_call_number'),
                        'enumerationAndChronology':
                            item_data.get('enumerationAndChronology'),
                        'url': item_data.get('url'),
                        'notes': item_data.get('notes', []),
                    }
                    results.append(item_result)
            else:
                results.append(result)
    return results