def import_users_from_json(dump_file):
    """Import additional user data from JSON."""
    # The CLI passes a tuple of open file handles; only the first dump is used.
    dump_file = dump_file[0]
    with click.progressbar(json.load(dump_file)) as bar:
        for record in bar:
            click.echo(
                'Importing user "{0}({1})"...'.format(
                    record["id"], record["email"]
                )
            )
            user = get_user_by_person_id(record["ccid"])
            if not user:
                click.secho(
                    "User {0}({1}) not synced via LDAP".format(
                        record["id"], record["email"]
                    ),
                    fg="red",
                )
                continue
                # todo uncomment when more data
                # raise UserMigrationError
            client_id = current_app.config["CERN_APP_OPENID_CREDENTIALS"][
                "consumer_key"
            ]
            account = RemoteAccount.get(user_id=user.id, client_id=client_id)
            extra_data = account.extra_data
            # add legacy_id information
            account.extra_data.update(legacy_id=record["id"], **extra_data)
            db.session.add(account)
            patron = Patron(user.id)
            PatronIndexer().index(patron)
    db.session.commit()
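# A minimal sketch of how import_users_from_json might be wired up as a Click
# command. The group and command names below are assumptions for illustration,
# not the project's actual CLI. It shows why the function receives a tuple and
# immediately takes dump_file[0]: a click.File argument declared with nargs=-1
# is passed to the callback as a tuple of open file handles.
import click
from flask.cli import with_appcontext


@click.group()
def migration():
    """Migration commands (hypothetical group)."""


@migration.command()
@click.argument("sources", type=click.File("r"), nargs=-1)
@with_appcontext
def import_users(sources):
    """Import legacy users from one or more JSON dumps."""
    import_users_from_json(sources)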
def test_patron_loans_view(app, patron1, testdata, client):
    """Test the patron loans view."""
    db.session.add(
        ActionUsers.allow(retrieve_patron_loans_access_action, user=patron1)
    )
    db.session.commit()

    patron = Patron(patron1.id)
    PatronIndexer().index(patron)
    current_search.flush_and_refresh(index="*")

    login_user_via_session(client, email=patron1.email)
    resp = client.get(
        url_for("cds_ils_patron_loans.patron_loans", person_id=1)
    )
    assert resp.status_code == 200

    expected_books_on_loan = [
        {
            "barcode": "123456789-3",
            "end_date": "2018-07-28",
            "library": "Main Library",
            "location": "Route de Meyrin",
            "title": "Prairie Fires: The American Dreams of "
            "Laura Ingalls Wilder",
        }
    ]
    expected_loan_requests = [
        {
            "request_start_date": "2018-06-28",
            "request_end_date": "2018-07-28",
            "library": "Main Library",
            "location": "Route de Meyrin",
            "title": "The Gulf: The Making of An American Sea",
        }
    ]

    data = resp.json
    assert data["books_on_loan"] == expected_books_on_loan
    assert data["loan_requests"] == expected_loan_requests

    # test extra_info
    assert patron.extra_info
    assert data["person_id"] == patron.extra_info["person_id"]
    assert data["department"] == patron.extra_info["department"]
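# A hedged sketch of where retrieve_patron_loans_access_action could come
# from: invenio-access lets applications declare named actions with
# action_factory, and the ActionUsers.allow(...) call in the test above grants
# that action to a single user. The action name here is an assumption.
from invenio_access import action_factory

retrieve_patron_loans_access_action = action_factory(
    "retrieve-patron-loans-access"
)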
def test_data_migration(app, db, es_clear, patrons):
    """Prepare minimal data for migration tests."""
    data = load_json_from_datadir("locations.json")
    locations = _create_records(db, data, Location, LOCATION_PID_TYPE)

    data = load_json_from_datadir("internal_locations.json")
    int_locs = _create_records(
        db, data, InternalLocation, INTERNAL_LOCATION_PID_TYPE
    )

    data = load_json_from_datadir("documents.json")
    documents = _create_records(db, data, Document, DOCUMENT_PID_TYPE)

    data = load_json_from_datadir("items.json")
    items = _create_records(db, data, Item, ITEM_PID_TYPE)

    data = load_json_from_datadir("ill_libraries.json")
    ill_libraries = _create_records(db, data, Provider, PROVIDER_PID_TYPE)

    data = load_json_from_datadir("vendors.json")
    vendors = _create_records(db, data, Provider, PROVIDER_PID_TYPE)

    # index
    ri = RecordIndexer()
    for rec in (
        locations + int_locs + documents + items + ill_libraries + vendors
    ):
        ri.index(rec)

    # wait for indexing
    time.sleep(1)

    create_default_records()

    patron = Patron(patrons[0].id)
    PatronIndexer().index(patron)
    current_search.flush_and_refresh(index="*")
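# A minimal sketch of the load_json_from_datadir helper the test above relies
# on; it is a project test utility, so the real implementation (and the data
# directory layout) may differ. DATADIR is an assumed fixture location.
import json
import os

DATADIR = os.path.join(os.path.dirname(__file__), "data")


def load_json_from_datadir(filename):
    """Load a JSON fixture from the test data directory."""
    with open(os.path.join(DATADIR, filename)) as f:
        return json.load(f)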