def test_assay_upload_ingestion_success(clean_db, monkeypatch, caplog):
    """Check that `UploadJobs.ingestion_success` enforces status transitions
    and only sends a success email when explicitly requested."""
    caplog.set_level(logging.DEBUG)

    # The uploader must exist for the job's uploader_email FK.
    Users.create(PROFILE)
    trial = TrialMetadata.create(TRIAL_ID, METADATA)
    assay_upload = UploadJobs.create(
        upload_type="ihc",
        uploader_email=EMAIL,
        gcs_file_map={},
        metadata={PROTOCOL_ID_FIELD_NAME: TRIAL_ID},
        gcs_xlsx_uri="",
        commit=False,
    )
    clean_db.commit()

    # Ensure that success can't be declared from a starting state
    with pytest.raises(Exception, match="current status"):
        assay_upload.ingestion_success(trial)

    # Update assay_upload status to simulate a completed but not ingested upload
    assay_upload.status = UploadJobStatus.UPLOAD_COMPLETED.value
    assay_upload.ingestion_success(trial)

    # Check that status was updated and email wasn't sent by default
    db_record = UploadJobs.find_by_id(assay_upload.id)
    assert db_record.status == UploadJobStatus.MERGE_COMPLETED.value
    assert "Would send email with subject '[UPLOAD SUCCESS]" not in caplog.text

    # Check that email gets sent when specified
    assay_upload.ingestion_success(trial, send_email=True)
    assert "Would send email with subject '[UPLOAD SUCCESS]" in caplog.text
def test_filter_user_lookups(app, db, monkeypatch):
    """Check user GET-request role-based filtering: non-admins only see
    themselves, admins see everyone."""
    monkeypatch.setattr(app.auth, "token_auth", fake_token_auth)
    client = app.test_client()

    # Create two new users
    with app.app_context():
        Users.create(profile)
        Users.create(other_profile)

    # Check that a user can only look themselves up
    response = client.get(USERS, headers=AUTH_HEADER)
    assert response.status_code == 200
    users = response.json["_items"]
    assert len(users) == 1
    assert users[0]["email"] == profile["email"]

    # Filtering on the user's own email should return that same single record.
    # BUGFIX: the query string was '?where{"email": ...}' (missing the "=" after
    # "where"), so no filter was ever applied and the check passed vacuously.
    filtered_response = client.get(
        USERS + '?where={"email": "%s"}' % EMAIL, headers=AUTH_HEADER
    )
    assert filtered_response.status_code == 200
    assert filtered_response.json["_items"] == response.json["_items"]

    # If the user tries to look up someone else, they get nothing back
    response = client.get(
        USERS + '?where={"email": "%s"}' % other_profile["email"], headers=AUTH_HEADER
    )
    assert response.status_code == 200
    assert len(response.json["_items"]) == 0

    # Make a user an admin
    with app.app_context():
        db.query(Users).filter_by(email=EMAIL).update({"role": "cidc-admin"})
        db.commit()

    # Admins should be able to list all users
    response = client.get(USERS, headers=AUTH_HEADER)
    assert response.status_code == 200
    assert len(response.json["_items"]) == 2
def test_create_upload_job(db):
    """Try to create an upload job and read it back by id."""
    # The uploader must exist before the job references their email.
    Users.create(PROFILE)
    gcs_file_uris = ["my/first/wes/blob1", "my/first/wes/blob2"]
    metadata_json_patch = {"foo": "bar"}

    # Create a fresh upload job
    new_job = UploadJobs.create(
        "dummy_assay", EMAIL, gcs_file_uris, metadata_json_patch
    )
    job = UploadJobs.find_by_id(new_job.id)
    assert_same_elements(new_job.gcs_file_uris, job.gcs_file_uris)
    assert job.status == "started"
def test_add_approval_date(app, db, monkeypatch):
    """Test that a user's approval_date is updated when their role is changed for the first time."""
    monkeypatch.setattr(app.auth, "token_auth", fake_token_auth)

    # Seed one pre-approved admin (the caller) and one brand-new user
    with app.app_context():
        db.add(Users(role="cidc-admin", approval_date=datetime.now(), **profile))
        db.commit()
        Users.create(other_profile)

    client = app.test_client()

    def fetch_new_user():
        # Look up the new user's current record (id, _etag, approval_date)
        resp = client.get(
            USERS + '?where={"email": "%s"}' % other_profile["email"],
            headers=AUTH_HEADER,
        )
        return resp.json["_items"][0]

    def set_role_and_read_approval(role: str):
        # PATCH the user's role, then return their (non-null) approval_date
        target = fetch_new_user()
        resp = client.patch(
            f"{USERS}/{target['id']}",
            headers={**AUTH_HEADER, "If-Match": target["_etag"]},
            json={"role": role},
        )
        assert resp.status_code == 200
        date_approved = fetch_new_user().get("approval_date")
        assert date_approved is not None
        return date_approved

    # The first role change stamps approval_date; later changes must not move it
    initial = set_role_and_read_approval("developer")
    subsequent = set_role_and_read_approval("cidc-admin")
    assert initial == subsequent
def test_upload_job_no_file_map(clean_db):
    """Create an upload job with no gcs_file_map and check it yields no
    upload URIs."""
    # The uploader must exist before the job references their email.
    Users.create(PROFILE)
    metadata_patch = {PROTOCOL_ID_FIELD_NAME: TRIAL_ID}
    gcs_xlsx_uri = "xlsx/assays/wes/12:0:1.5123095"

    TrialMetadata.create(TRIAL_ID, METADATA)

    # A manifest upload carries no data files, so gcs_file_map is None
    new_job = UploadJobs.create(
        prism.SUPPORTED_MANIFESTS[0], EMAIL, None, metadata_patch, gcs_xlsx_uri
    )
    assert list(new_job.upload_uris_with_data_uris_with_uuids()) == []

    job = UploadJobs.find_by_id_and_email(new_job.id, PROFILE["email"])
    assert list(job.upload_uris_with_data_uris_with_uuids()) == []
def test_assay_upload_merge_extra_metadata(clean_db, monkeypatch):
    """Check that `UploadJobs.merge_extra_metadata` runs the registered
    extra-metadata parser on each file and merges the results into the
    job's metadata patch by artifact UUID."""
    # The uploader must exist before the job references their email.
    Users.create(PROFILE)

    TrialMetadata.create(TRIAL_ID, METADATA)

    assay_upload = UploadJobs.create(
        upload_type="assay_with_extra_md",
        uploader_email=EMAIL,
        gcs_file_map={},
        metadata={
            PROTOCOL_ID_FIELD_NAME: TRIAL_ID,
            "whatever": {
                "hierarchy": [
                    {"we just need a": "uuid-1", "to be able": "to merge"},
                    {"and": "uuid-2"},
                ]
            },
        },
        gcs_xlsx_uri="",
        commit=False,
    )
    # Pin the job id so merge_extra_metadata can look it up below
    assay_upload.id = 111
    clean_db.commit()

    # Fake parser: record the file's bytes under an "extra" key
    custom_extra_md_parse = MagicMock()
    custom_extra_md_parse.side_effect = lambda f: {"extra": f.read().decode()}
    monkeypatch.setattr(
        "cidc_schemas.prism.merger.EXTRA_METADATA_PARSERS",
        {"assay_with_extra_md": custom_extra_md_parse},
    )

    UploadJobs.merge_extra_metadata(
        111,
        {
            "uuid-1": io.BytesIO(b"within extra md file 1"),
            "uuid-2": io.BytesIO(b"within extra md file 2"),
        },
        session=clean_db,
    )

    # The merge should update the existing job in place, not create a new one
    assert 1 == clean_db.query(UploadJobs).count()
    au = clean_db.query(UploadJobs).first()
    assert "extra" in au.metadata_patch["whatever"]["hierarchy"][0]
    assert "extra" in au.metadata_patch["whatever"]["hierarchy"][1]
def test_create_assay_upload(clean_db):
    """Create an assay upload job and verify its file map round-trips,
    including the (object URI, target URI, artifact UUID) triples."""
    # The uploader must exist before the job references their email.
    Users.create(PROFILE)

    gcs_file_map = {
        "my/first/wes/blob1/2019-08-30T15:51:38.450978": "test-uuid-1",
        "my/first/wes/blob2/2019-08-30T15:51:38.450978": "test-uuid-2",
    }
    metadata_patch = {PROTOCOL_ID_FIELD_NAME: TRIAL_ID}
    gcs_xlsx_uri = "xlsx/assays/wes/12:0:1.5123095"

    # Should fail, since trial doesn't exist yet
    with pytest.raises(IntegrityError):
        UploadJobs.create("wes_bam", EMAIL, gcs_file_map, metadata_patch, gcs_xlsx_uri)
    clean_db.rollback()

    TrialMetadata.create(TRIAL_ID, METADATA)

    new_job = UploadJobs.create(
        "wes_bam", EMAIL, gcs_file_map, metadata_patch, gcs_xlsx_uri
    )
    job = UploadJobs.find_by_id_and_email(new_job.id, PROFILE["email"])

    # The persisted job's file map matches what was submitted
    assert len(new_job.gcs_file_map) == len(job.gcs_file_map)
    assert set(new_job.gcs_file_map) == set(job.gcs_file_map)
    assert job.status == "started"

    # Each entry yields (timestamped object URI, bare data URI, uuid)
    assert list(job.upload_uris_with_data_uris_with_uuids()) == [
        (
            "my/first/wes/blob1/2019-08-30T15:51:38.450978",
            "my/first/wes/blob1",
            "test-uuid-1",
        ),
        (
            "my/first/wes/blob2/2019-08-30T15:51:38.450978",
            "my/first/wes/blob2",
            "test-uuid-2",
        ),
    ]
def test_duplicate_user(clean_db):
    """Ensure that a user won't be created twice"""
    # Creating the same profile twice must leave exactly one row behind
    for _ in range(2):
        Users.create(PROFILE)
    assert clean_db.query(Users).count() == 1
def test_create_user(clean_db):
    """Try to create a user that doesn't exist"""
    Users.create(PROFILE)
    # The freshly created user should be findable by email
    fetched = Users.find_by_email(EMAIL)
    assert fetched
    assert fetched.email == EMAIL