def create(cls, data, id_=None, **kwargs):
    """Create a Loan record, stamping its JSON schema first.

    When a ``CIRCULATION_ITEM_REF_BUILDER`` is configured and the loan
    references an item, the related document PID and item ``$ref`` are
    resolved and stored on the record before creation.
    """
    data["$schema"] = current_jsonschemas.path_to_url(cls._schema)
    # Resolve document/item references only when both a builder is
    # configured and the loan actually points at an item.
    builder = current_app.config.get("CIRCULATION_ITEM_REF_BUILDER")
    pid_of_item = data.get("item_pid")
    if builder and pid_of_item:
        data["document_pid"] = get_document_pid_by_item_pid(pid_of_item)
        data["item"] = builder(data["loan_pid"])
    return super(Loan, cls).create(data, id_=id_, **kwargs)
def post_load(self, data):
    """Stamp the Video JSON schema URL on freshly loaded data.

    :param data: deserialized record data; mutated in place.
    :returns: the same ``data`` mapping, for chaining.
    """
    data['$schema'] = current_jsonschemas.path_to_url(Video._schema)
    return data
def schema_from_context(_, context):
    """Get the record's schema from context.

    :param _: unused positional argument (supplied by the caller).
    :param context: serialization context, possibly ``None``.
    :returns: the ``_schema`` value stored on the context's record, or
        the default URL built from ``Record._schema``.
    """
    record = (context or {}).get('record', {})
    # Evaluate the fallback lazily: ``dict.get(key, default)`` would
    # build the default schema URL (an app-context-dependent call) even
    # when the record already carries its own schema.
    schema = record.get("_schema")
    if schema is None:
        schema = current_jsonschemas.path_to_url(Record._schema)
    return schema
def get_record_schema(cls):
    """Return the record schema URL with the deposit prefix stripped.

    The class schema path is assumed to start with the configured
    ``DEPOSIT_JSONSCHEMAS_PREFIX``; the remainder is turned into a URL.
    """
    prefix = current_app.config['DEPOSIT_JSONSCHEMAS_PREFIX']
    relative_schema = cls._schema[len(prefix):]
    return current_jsonschemas.path_to_url(relative_schema)
def clear(self):
    """Empty the IlsRecord data, keeping the mandatory ``$schema``.

    ``$schema`` must survive a clear because it identifies the record
    type for later validation.
    """
    super().clear()
    self["$schema"] = current_jsonschemas.path_to_url(self._schema)
def create(cls, data, id_=None, **kwargs):
    """Create a Location record with its JSON schema URL attached."""
    data["$schema"] = current_jsonschemas.path_to_url(cls._schema)
    return super(Location, cls).create(data, id_=id_, **kwargs)
def create(cls, data, id_=None, **kwargs):
    """Create an IlsRecord, stamping the ``$schema`` URL beforehand."""
    data["$schema"] = current_jsonschemas.path_to_url(cls._schema)
    return super().create(data, id_=id_, **kwargs)
def test_loan_operation_log(client, operation_log_data, loan_validated_martigny,
                            librarian_martigny):
    """Test operation logs creation.

    Creates an operation log from a validated loan and checks the
    extracted loan/item/patron metadata; then re-creates the log with a
    ``selfcheck_terminal_id`` to check the SIP2 transaction channel.
    """
    login_user_for_view(client, librarian_martigny)
    # deepcopy: LoanOperationLog.create mutates its input.
    operation_log = LoanOperationLog.create(
        deepcopy(loan_validated_martigny), index_refresh='wait_for')
    operation_log['$schema'] = current_jsonschemas.path_to_url(
        LoanOperationLog._schema)
    operation_log.validate()
    log_data = LoanOperationLog.get_record(operation_log.id)
    assert log_data['operation'] == 'create'
    assert log_data['user_name'] == 'Pedronni, Marie'
    assert log_data['date'] == loan_validated_martigny['transaction_date']
    assert not log_data['loan']['override_flag']
    assert log_data['loan']['transaction_channel'] == 'system'
    assert log_data['loan']['transaction_user']['name'] == 'Pedronni, Marie'
    assert log_data['loan']['transaction_location'][
        'name'] == 'Martigny Library Public Space'
    assert log_data['loan']['pickup_location'][
        'name'] == 'Martigny Library Public Space'
    # Patron data is anonymized: only the hashed PID and coarse
    # demographic fields are kept on the log.
    assert log_data['loan']['patron'] == {
        'pid': 'ptrn6',
        'hashed_pid': 'e11ff43bff5be4cf70350e2d15149e29',
        'name': 'Roduit, Louis',
        'type': 'children',
        'age': 74,
        'postal_code': '1920',
        'gender': 'other',
        'local_codes': ['code1']
    }
    assert log_data['loan']['item'] == {
        'category': 'standard',
        'call_number': '001313',
        'document': {
            'pid': 'doc1',
            'title': 'titre en chinois. Part Number, Part Number = '
                     'Titolo cinese : sottotitolo in cinese',
            'type': 'docsubtype_other_book'
        },
        'holding': {
            'pid': '1',
            'location_name': 'Martigny Library Public Space'
        },
        'library_pid': 'lib1',
        'pid': 'item5'
    }
    # Test SIP2: a selfcheck terminal id switches the transaction
    # channel to 'sip2' and drops the transaction user.
    loan = deepcopy(loan_validated_martigny)
    loan['selfcheck_terminal_id'] = 'ABCDEF'
    operation_log = LoanOperationLog.create(loan, index_refresh='wait_for')
    operation_log['$schema'] = current_jsonschemas.path_to_url(
        LoanOperationLog._schema)
    operation_log.validate()
    log_data = LoanOperationLog.get_record(operation_log.id)
    assert log_data['loan']['transaction_channel'] == 'sip2'
    assert not log_data['loan'].get('transaction_user')
def create(cls, data, id_=None, **kwargs):
    """Create a Lom record pointing at the ``lom-v1.0.0`` schema."""
    # NOTE(review): the schema path is hard-coded rather than derived
    # from ``cls._schema`` — presumably intentional; confirm.
    data["$schema"] = current_jsonschemas.path_to_url('lom/lom-v1.0.0.json')
    return super(LomRecords, cls).create(data, id_=id_, **kwargs)
def schema_from_context(_, context, data, schema):
    """Get the record's schema from context.

    :param _: unused positional argument (supplied by the caller).
    :param context: serialization context, possibly ``None``.
    :param data: unused here; part of the expected callable signature.
    :param schema: schema path used to build the fallback URL.
    :returns: the record's ``$schema`` or the URL built from ``schema``.
    """
    ctx = context or {}
    record = ctx.get('record', {})
    default_url = current_jsonschemas.path_to_url(schema)
    return record.get('$schema', default_url)
def create(cls, data, id_=None, **kwargs):
    """Create a Document record.

    Stamps the ``$schema`` URL, then runs the optional class-level
    ``validate_create`` hook before delegating to ``IlsRecord.create``.
    """
    data["$schema"] = current_jsonschemas.path_to_url(cls._schema)
    # No-op fallback keeps classes without the hook working unchanged.
    validate_hook = getattr(cls, 'validate_create', lambda x: x)
    validate_hook(data)
    return super(IlsRecord, cls).create(data, id_=id_, **kwargs)
def bulk_records(records):
    """Records creation.

    Bulk-loads documents with their holdings and items: records are
    created without per-record commit/reindex, then committed and sent
    to the indexer in batches of 1000.

    :param records: iterable of source records; only those flagged with
        ``frbr`` are processed.
    :returns: the number of created document records.
    """
    n_updated = 0
    n_rejected = 0
    n_created = 0
    record_schema = current_jsonschemas.path_to_url('documents/document-v0.0.1.json')
    item_schema = current_jsonschemas.path_to_url('items/item-v0.0.1.json')
    holding_schema = current_jsonschemas.path_to_url('holdings/holding-v0.0.1.json')
    host_url = current_app.config.get('RERO_ILS_APP_BASE_URL')
    url_api = '{host}/api/{doc_type}/{pid}'
    record_id_iterator = []
    item_id_iterator = []
    holding_id_iterator = []
    indexer = RecordIndexer()
    start_time = datetime.now()
    for record in records:
        try:
            if record.get('frbr', False):
                document = record.get('document', {})
                # Dead code kept as-is: former VIRTUA-bibid dedup lookup.
                """
                # check if already in Rero-ILS
                pid = None
                for identifier in document.get('identifiedBy') :
                    if identifier.get('source') == 'VIRTUA' :
                        bibid = identifier.get('value')
                        query = DocumentsSearch().filter(
                            'term', identifiedBy__value=bibid
                        ).source(includes=['pid'])
                        try:
                            pid = [r.pid for r in query.scan()].pop()
                        except IndexError:
                            pid = None
                if pid:
                    # update the record
                    # Do nothing for the moment
                    continue
                else:
                """
                document['$schema'] = record_schema
                created_time = datetime.now()  # NOTE(review): unused
                # dbcommit/reindex deferred to the batch flush below.
                document = Document.create(
                    document,
                    dbcommit=False,
                    reindex=False
                )
                record_id_iterator.append(document.id)
                uri_documents = url_api.format(host=host_url,
                                               doc_type='documents',
                                               pid=document.pid)
                # Maps "location#circulation_category" -> holding pid so
                # items below can reference their holding.
                map_holdings = {}
                for holding in record.get('holdings'):
                    holding['$schema'] = holding_schema
                    holding['document'] = {
                        '$ref': uri_documents
                    }
                    holding['circulation_category'] = {
                        '$ref': map_item_type(str(holding.get('circulation_category')))
                    }
                    holding['location'] = {
                        '$ref': map_locations(str(holding.get('location')))
                    }
                    created_time = datetime.now()  # NOTE(review): unused
                    result = Holding.create(
                        holding,
                        dbcommit=False,
                        reindex=False
                    )
                    map_holdings.update(
                        {
                            '{location}#{cica}'.format(
                                location=holding.get('location'),
                                cica=holding.get('circulation_category')):
                                result.get('pid')
                        }
                    )
                    holding_id_iterator.append(result.id)
                for item in record.get('items'):
                    item['$schema'] = item_schema
                    item['document'] = {
                        '$ref': uri_documents
                    }
                    item['item_type'] = {
                        '$ref': map_item_type(str(item.get('item_type')))
                    }
                    item['location'] = {
                        '$ref': map_locations(str(item.get('location')))
                    }
                    # Look up the holding created above for this
                    # location/item-type combination.
                    holding_pid = map_holdings.get(
                        '{location}#{cica}'.format(
                            location=item.get('location'),
                            cica=item.get('item_type')))
                    item['holding'] = {
                        '$ref': url_api.format(host=host_url,
                                               doc_type='holdings',
                                               pid=holding_pid)
                    }
                    result = Item.create(
                        item,
                        dbcommit=False,
                        reindex=False
                    )
                    item_id_iterator.append(result.id)
                n_created += 1
                # Batch flush every 1000 created documents: commit the
                # session, then push holdings, items and documents to
                # the indexer queue in dependency order.
                if n_created % 1000 == 0:
                    execution_time = datetime.now() - start_time
                    click.secho('{nb} created records in {execution_time}.'
                                .format(nb=len(record_id_iterator),
                                        execution_time=execution_time),
                                fg='white')
                    start_time = datetime.now()
                    db.session.commit()
                    execution_time = datetime.now() - start_time
                    click.secho('{nb} commited records in {execution_time}.'
                                .format(nb=len(record_id_iterator),
                                        execution_time=execution_time),
                                fg='white')
                    start_time = datetime.now()
                    click.secho('sending {n} holdings to indexer queue.'
                                .format(n=len(holding_id_iterator)),
                                fg='white')
                    indexer.bulk_index(holding_id_iterator)
                    click.secho('process queue...', fg='yellow')
                    indexer.process_bulk_queue()
                    click.secho('sending {n} items to indexer queue.'
                                .format(n=len(item_id_iterator)), fg='white')
                    indexer.bulk_index(item_id_iterator)
                    click.secho('process queue...', fg='yellow')
                    indexer.process_bulk_queue()
                    click.secho('sending {n} documents to indexer queue.'
                                .format(n=len(record_id_iterator)),
                                fg='white')
                    indexer.bulk_index(record_id_iterator)
                    click.secho('process queue...', fg='yellow')
                    indexer.process_bulk_queue()
                    execution_time = datetime.now() - start_time
                    click.secho('indexing records process in {execution_time}.'
                                .format(execution_time=execution_time),
                                fg='white')
                    click.secho('processing next batch records.', fg='green')
                    record_id_iterator.clear()
                    holding_id_iterator.clear()
                    item_id_iterator.clear()
                    start_time = datetime.now()
        except Exception as e:
            # Best-effort bulk load: a broken record is reported and
            # skipped rather than aborting the whole batch.
            n_rejected += 1
            click.secho('Error processing record [{id}] : {e}'
                        .format(id=record.get('_id'), e=e), fg='red')
    # Final flush for the (possibly partial) last batch.
    db.session.commit()
    indexer.bulk_index(holding_id_iterator)
    indexer.process_bulk_queue()
    indexer.bulk_index(item_id_iterator)
    indexer.process_bulk_queue()
    indexer.bulk_index(record_id_iterator)
    indexer.process_bulk_queue()
    return n_created
def update(self, other, **kwargs):
    """Update the record data and re-stamp the draft schema URL."""
    super().update(other, **kwargs)
    # Re-applied after every update so the schema cannot be overwritten.
    self['$schema'] = current_jsonschemas.path_to_url(
        'draft/records/record-v1.0.0.json')
def validate(self, **kwargs):
    """Force the draft record schema URL, then run validation."""
    self['$schema'] = current_jsonschemas.path_to_url(
        'draft/records/record-v1.0.0.json')
    return super().validate(**kwargs)