def test_add_person_conflicting_external_id_different_state(self) -> None:
    """An external_id already committed for another state can be reused for
    this state: the flush in the Act phase must succeed without raising."""
    # Arrange
    with SessionFactory.using_database(
            self.database_key, autocommit=False) as arrange_session:
        db_external_id = generate_external_id(
            state_code="OTHER_STATE_CODE",
            external_id=self.EXTERNAL_ID_1,
            id_type=self.ID_TYPE_1,
        )
        db_person = generate_person(state_code="OTHER_STATE_CODE",
                                    external_ids=[db_external_id])
        arrange_session.add(db_person)
        arrange_session.commit()

    # Same (external_id, id_type) pair, but for this test's state code.
    db_external_id_duplicated = generate_external_id(
        state_code=self.state_code,
        external_id=self.EXTERNAL_ID_1,
        id_type=self.ID_TYPE_1,
    )
    db_person_new = generate_person(
        state_code=self.state_code,
        external_ids=[db_external_id_duplicated])

    # Act
    with SessionFactory.using_database(self.database_key) as session:
        session.add(db_person_new)
        # No exception expected: uniqueness is scoped per state.
        session.flush()
def test_add_court_case_conflicting_external_id_person_id_different_state(
    self,
) -> None:
    """Court cases in different states may share the same external_id."""
    # Arrange: persist a court case for this test's state.
    with SessionFactory.using_database(
            self.database_key) as arrange_session:
        first_case = generate_court_case(
            person=generate_person(state_code=self.state_code),
            external_id=self.EXTERNAL_ID_1,
            state_code=self.state_code,
        )
        arrange_session.add(first_case)

    # A court case in another state that reuses the same external id.
    duplicate_case = generate_court_case(
        person=generate_person(state_code=self.OTHER_STATE_CODE),
        external_id=self.EXTERNAL_ID_1,
        state_code=self.OTHER_STATE_CODE,
    )

    # Act: the insert should flush cleanly - no unique-constraint clash.
    with SessionFactory.using_database(self.database_key) as session:
        session.add(duplicate_case)
        session.flush()
def test_raw_file_metadata_normalized_file_name_unique_constraint(
        self) -> None:
    """Two raw-file metadata rows may not share a normalized_file_name."""

    def _raw_metadata(discovery: datetime.datetime,
                      upper_bound: datetime.datetime):
        # Both rows deliberately reuse the same normalized_file_name.
        return schema.DirectIngestRawFileMetadata(
            region_code="us_xx_yyyy",
            file_tag="file_tag",
            discovery_time=discovery,
            normalized_file_name="foo.txt",
            datetimes_contained_upper_bound_inclusive=upper_bound,
        )

    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        session.add(_raw_metadata(datetime.datetime(2019, 10, 11),
                                  datetime.datetime(2019, 10, 10)))
        session.add(_raw_metadata(datetime.datetime(2019, 11, 12),
                                  datetime.datetime(2019, 11, 11)))
        with self.assertRaises(IntegrityError):
            session.commit()

    # The failed commit must not have persisted either row.
    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        self.assertEqual(
            [], session.query(schema.DirectIngestRawFileMetadata).all())
def test_add_object_conflicting_external_id_no_flush(
        self, generate_func) -> None:
    """Adding a second object (built by |generate_func|) that reuses an
    already-committed external_id must raise IntegrityError at commit."""
    # Arrange
    with SessionFactory.using_database(
            self.database_key) as arrange_session:
        db_external_id = generate_external_id(
            state_code=self.state_code,
            external_id=self.EXTERNAL_ID_1,
            id_type=self.ID_TYPE_1,
        )
        db_person = generate_person(state_code=self.state_code,
                                    external_ids=[db_external_id])
        db_object_new = generate_func(db_person,
                                      external_id=self.EXTERNAL_ID_1,
                                      state_code=self.state_code)
        arrange_session.add(db_object_new)

    # A second person (distinct person external id) whose object reuses
    # EXTERNAL_ID_1 - only the object-level id conflicts.
    db_external_id = generate_external_id(
        state_code=self.state_code,
        external_id=self.EXTERNAL_ID_2,
        id_type=self.ID_TYPE_1,
    )
    db_person = generate_person(state_code=self.state_code,
                                external_ids=[db_external_id])
    db_object_new = generate_func(db_person,
                                  external_id=self.EXTERNAL_ID_1,
                                  state_code=self.state_code)

    # Act
    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        session.add(db_object_new)
        with self.assertRaises(sqlalchemy.exc.IntegrityError):
            session.commit()
def testStateRecordTreeSnapshotUpdate(self):
    """Updating a StatePerson writes a new history snapshot for the person
    but not for its unchanged child StateSentenceGroup."""
    person = generate_schema_state_person_obj_tree()

    ingest_time_1 = datetime.datetime(2018, 7, 30)
    self._commit_person(person, SystemLevel.STATE, ingest_time_1)

    # Every object in the tree should have exactly one snapshot so far.
    all_schema_objects = self._get_all_schema_objects_in_db(
        state_schema.StatePerson, state_schema, [])
    for schema_object in all_schema_objects:
        self._assert_expected_snapshots_for_schema_object(
            schema_object, [ingest_time_1])

    # Commit an update to the StatePerson
    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as update_session:
        person = one(update_session.query(state_schema.StatePerson).all())
        person.full_name = "new name"
        ingest_time_2 = datetime.datetime(2018, 7, 31)
        self._commit_person(person, SystemLevel.STATE, ingest_time_2)

    # Check that StatePerson had a new history table row written, but not
    # its child SentenceGroup.
    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as assert_session:
        person = one(assert_session.query(state_schema.StatePerson).all())
        sentence_group = one(
            assert_session.query(state_schema.StateSentenceGroup).all())
        self._assert_expected_snapshots_for_schema_object(
            person, [ingest_time_1, ingest_time_2])
        self._assert_expected_snapshots_for_schema_object(
            sentence_group, [ingest_time_1])
def test_clients_for_officer(self) -> None:
    """clients_for_officer returns only the clients supervised by the
    officer in the given user context."""
    officers = [generate_fake_officer(f"id_{i}") for i in (1, 2, 3)]
    auth_store = AuthorizationStore()
    user_contexts = [
        UserContext(
            email=officer.email_address,
            authorization_store=auth_store,
            current_user=officer,
        )
        for officer in officers
    ]

    # Two clients for officer 1, one for officer 2, none for officer 3.
    clients = [
        generate_fake_client("client_1", supervising_officer_id="id_1"),
        generate_fake_client("client_2", supervising_officer_id="id_1"),
        generate_fake_client("client_3", supervising_officer_id="id_2"),
    ]

    with SessionFactory.using_database(self.database_key) as session:
        # Keep the ORM objects usable after the session commits on exit.
        session.expire_on_commit = False
        for record in officers + clients:
            session.add(record)

    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as read_session:
        for user_context, expected_count in zip(user_contexts, (2, 1, 0)):
            self.assertEqual(
                len(
                    CaseTriageQuerier.clients_for_officer(
                        read_session, user_context)),
                expected_count,
            )
def test_insert_case_update_for_person(self) -> None:
    """One update_case_for_person call persists exactly one CaseUpdate."""
    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as commit_session:
        CaseUpdatesInterface.update_case_for_person(
            commit_session,
            self.mock_context,
            self.mock_client,
            CaseUpdateActionType.DISCHARGE_INITIATED,
        )

    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as read_session:
        persisted_updates = read_session.query(CaseUpdate).all()
        self.assertEqual(len(persisted_updates), 1)
def test_fetch_officer_id_happy_path(self) -> None:
    """officer_for_email resolves each email to the matching external id."""
    first_officer = generate_fake_officer("id_1", "*****@*****.**")
    second_officer = generate_fake_officer("id_2", "*****@*****.**")
    with SessionFactory.using_database(self.database_key) as session:
        session.add(first_officer)
        session.add(second_officer)

    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as read_session:
        first_fetch = CaseTriageQuerier.officer_for_email(
            read_session, "*****@*****.**")
        self.assertEqual(first_fetch.external_id, "id_1")

        second_fetch = CaseTriageQuerier.officer_for_email(
            read_session, "*****@*****.**")
        self.assertEqual(second_fetch.external_id, "id_2")
def testWrite_SingleCountWithDateAndAllDemographics(self) -> None:
    """A single_count request with full demographics persists one row whose
    fields round-trip exactly."""
    params = {
        "jid": "01001001",
        "ethnicity": Ethnicity.HISPANIC.value,
        "gender": Gender.FEMALE.value,
        "race": Race.BLACK.value,
        "count": 311,
        "date": "2019-01-01",
    }
    response = self.client.get(
        f"/single_count?{urlencode(params)}",
        headers={"X-Appengine-Cron": "test-cron"},
    )
    self.assertEqual(response.status_code, 200)

    # Assert
    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        result = one(session.query(SingleCountAggregate).all())
        self.assertEqual(result.count, params["count"])

        date_str = params["date"]
        # Narrow the dict value type before parsing.
        if not isinstance(date_str, str):
            raise ValueError(
                f"Unexpected type for date_str: [{type(date_str)}]")
        self.assertEqual(result.date, str_field_utils.parse_date(date_str))

        self.assertEqual(result.ethnicity, params["ethnicity"])
        self.assertEqual(result.gender, params["gender"])
        self.assertEqual(result.race, params["race"])
def register_ingest_file_split(
    self,
    original_file_metadata: DirectIngestIngestFileMetadata,
    path: GcsfsFilePath,
) -> DirectIngestIngestFileMetadata:
    """Creates and persists a metadata row for a split of an ingest view
    file, copying the date bounds and ingest database name from the
    original file's metadata, and returns it converted to an entity."""
    self._check_is_ingest_view_file_path(path)

    with SessionFactory.using_database(self.database_key) as session:
        metadata = schema.DirectIngestIngestFileMetadata(
            region_code=self.region_code,
            file_tag=original_file_metadata.file_tag,
            is_invalidated=False,
            is_file_split=True,
            job_creation_time=datetime.datetime.now(tz=pytz.UTC),
            normalized_file_name=path.file_name,
            datetimes_contained_lower_bound_exclusive=original_file_metadata
            .datetimes_contained_lower_bound_exclusive,
            datetimes_contained_upper_bound_inclusive=original_file_metadata
            .datetimes_contained_upper_bound_inclusive,
            ingest_database_name=original_file_metadata.
            ingest_database_name,
        )
        session.add(metadata)
        # Commit before converting so DB-generated fields (e.g. the primary
        # key) are populated on |metadata|.
        session.commit()
        return self._ingest_file_schema_metadata_as_entity(metadata)
def mark_raw_file_as_processed(self, path: GcsfsFilePath) -> None:
    """Stamps the metadata row for |path| with the current UTC time as its
    processed time."""
    self._check_is_raw_file_path(path)

    with SessionFactory.using_database(self.database_key) as session:
        row = dao.get_raw_file_metadata_row_for_path(
            session, self.region_code, path)
        # Persisted automatically when the session context exits.
        row.processed_time = datetime.datetime.now(tz=pytz.UTC)
def test_add_person_simple_no_flush(self) -> None:
    """validate_invariants must refuse to run on a dirty (unflushed)
    session."""
    with SessionFactory.using_database(
        self.database_key, autocommit=False
    ) as session:
        # Arrange: add a person but deliberately skip session.flush().
        external_id = generate_external_id(
            state_code=self.state_code, external_id=EXTERNAL_ID_1,
            id_type=ID_TYPE_1
        )
        person = generate_person(
            state_code=self.state_code, external_ids=[external_id]
        )
        session.add(person)

        # Act
        with self.assertRaises(SessionIsDirtyError) as e:
            _ = validate_invariants(
                session, self.system_level, self.state_code, [person]
            )

        # Assert
        self.assertEqual(
            str(e.exception),
            "Session unexpectedly dirty - flush before querying the database.",
        )
def testWriteDf_rowsWithSameColumnsThatMustBeUnique_onlyWritesOnce(self):
    """write_df drops rows whose unique-constraint columns collide."""
    # Arrange: two county rows sharing one fips code.
    shared_fips = "12345"
    subject = pd.DataFrame({
        "county_name": ["Alachua", "Baker"],
        "county_population": [257062, 26965],
        "average_daily_population": [799, 478],
        "date_reported": [pd.NaT, pd.NaT],
        "fips": [shared_fips, shared_fips],
        "report_date": [DATE_SCRAPED, DATE_SCRAPED],
        "aggregation_window": [enum_strings.monthly_granularity] * 2,
        "report_frequency": [enum_strings.monthly_granularity] * 2,
    })

    # Act
    dao.write_df(FlCountyAggregate, subject)

    # Assert
    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        rows = session.query(FlCountyAggregate).all()
        self.assertEqual(len(rows), 1)
def test_add_court_case_conflicting_external_id_person_id_no_flush(
        self) -> None:
    """Two court cases for the same person may not share an external_id:
    the duplicate must raise IntegrityError at commit."""
    # Arrange
    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        db_person = generate_person(state_code=self.state_code,
                                    person_id=self.PERSON_ID_1)
        db_judge = generate_agent(state_code=self.state_code,
                                  external_id=self.EXTERNAL_ID_1)
        db_court_case = generate_court_case(
            person=db_person,
            external_id=self.EXTERNAL_ID_1,
            state_code=self.state_code,
            judge=db_judge,
        )
        session.add(db_court_case)
        session.commit()

        # Same person and external_id as above; only the judge differs.
        db_judge = generate_agent(state_code=self.state_code,
                                  external_id=self.EXTERNAL_ID_2)
        db_court_case_dupe = generate_court_case(
            person=db_person,
            external_id=self.EXTERNAL_ID_1,
            state_code=self.state_code,
            judge=db_judge,
        )

        # Act
        session.add(db_court_case_dupe)
        with self.assertRaises(sqlalchemy.exc.IntegrityError):
            session.commit()
def test_many_to_one_no_backref(self):
    """Round-trips a graph where two children share one many-to-one
    favorite_toy (there is no backref from toy to child)."""
    family = self.SimpsonsFamily()
    self.assertEqual(len(family.homer.children), 0)

    family.root.parents = [family.homer]
    family.homer.children = [family.bart, family.maggie]

    # One toy shared by both children via a many-to-one relationship.
    toy = entities.Toy.new_with_defaults(toy_id=456789, name="Skateboard")
    family.bart.favorite_toy = toy
    family.maggie.favorite_toy = toy

    schema_root = _TestSchemaEntityConverter().convert(family.root)

    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        session.add(schema_root)
        session.commit()

        db_roots = session.query(schema.Root).all()
        self.assertEqual(len(db_roots), 1)
        db_parents = session.query(schema.Parent).all()
        self.assertEqual(len(db_parents), 1)
        db_children = session.query(schema.Child).all()
        self.assertEqual(len(db_children), 2)
        # The shared toy must have been written only once.
        db_toys = session.query(schema.Toy).all()
        self.assertEqual(len(db_toys), 1)

        converted_root = _TestSchemaEntityConverter().convert(
            one(db_roots))
        self.assertEqual(len(converted_root.parents), 1)
        self.assertEqual(len(converted_root.parents[0].children), 2)
        # Both children still point at the same toy after the round trip.
        self.assertEqual(
            converted_root.parents[0].children[0].favorite_toy, toy)
        self.assertEqual(
            converted_root.parents[0].children[1].favorite_toy, toy)
def test_simple_tree(self):
    """Round-trips a minimal graph: one root node with one parent child."""
    family = self.SimpsonsFamily()
    self.assertEqual(len(family.homer.children), 0)
    family.root.parents = [family.homer]

    schema_root = _TestSchemaEntityConverter().convert(family.root)

    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        session.add(schema_root)
        session.commit()

        root_rows = session.query(schema.Root).all()
        self.assertEqual(len(root_rows), 1)
        parent_rows = session.query(schema.Parent).all()
        self.assertEqual(len(parent_rows), 1)

        # Converting back must reproduce the original entity graph.
        round_tripped = _TestSchemaEntityConverter().convert(one(root_rows))
        self.assertEqual(len(round_tripped.parents), 1)
        self.assertEqual(round_tripped.parents[0], family.homer)
        self.assertEqual(round_tripped.parents[0].children, [])
def test_add_behavior_2(self):
    """Appending an already-persisted parent to a new root must not write
    a duplicate parent row."""
    converter = _TestSchemaEntityConverter()
    schema_parent = converter.convert(
        entities.Parent.new_with_defaults(full_name="Krusty the Clown", ))

    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        session.add(schema_parent)
        session.commit()

        persisted_parents = session.query(schema.Parent).all()
        self.assertEqual(len(persisted_parents), 1)

        db_root = converter.convert(
            entities.Root.new_with_defaults(type=RootType.SIMPSONS, ))
        db_root.parents.append(persisted_parents[0])
        session.add(db_root)
        session.commit()

        # One root, and still only one parent row.
        self.assertEqual(len(session.query(schema.Root).all()), 1)
        self.assertEqual(len(session.query(schema.Parent).all()), 1)
def _fetch_po_user_feedback() -> Tuple[str, HTTPStatus]:
    """Returns every non-demo PO feedback comment as a JSON payload."""
    with SessionFactory.using_database(
        SQLAlchemyDatabaseKey.for_schema(SchemaType.CASE_TRIAGE),
        autocommit=False,
    ) as session:
        # Only rows that carry a comment, excluding demo accounts.
        feedback_rows = (
            session.query(CaseUpdate)
            .filter(
                CaseUpdate.comment.isnot(None),
                CaseUpdate.officer_external_id.notlike("demo::%"),
            )
            .all()
        )

        payload = [
            {
                "personExternalId": row.person_external_id,
                "officerExternalId": row.officer_external_id,
                "actionType": row.action_type,
                "comment": row.comment,
                "timestamp": str(row.action_ts),
            }
            for row in feedback_rows
        ]
        return jsonify(payload), HTTPStatus.OK
def test_add_person_conflicting_external_id_same_session(self) -> None:
    """Two people sharing one (external_id, id_type) in the same state must
    trip the unique constraint at commit, even within a single session."""

    def _person_with_external_id_1():
        external_id = generate_external_id(
            state_code=self.state_code,
            external_id=self.EXTERNAL_ID_1,
            id_type=self.ID_TYPE_1,
        )
        return generate_person(state_code=self.state_code,
                               external_ids=[external_id])

    # Arrange: two distinct people, identical external id records.
    first_person = _person_with_external_id_1()
    second_person = _person_with_external_id_1()

    # Act
    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        session.add(first_person)
        session.add(second_person)
        session.flush()
        with self.assertRaises(sqlalchemy.exc.IntegrityError):
            session.commit()
def test_import_gcs_csv_to_cloud_sql_swaps_tables(self) -> None:
    """Assert that the temp table and destination are successfully swapped."""
    client = self.mock_cloud_sql_client
    client.import_gcs_csv.side_effect = self._mock_load_data_from_csv
    client.wait_until_operation_completed.return_value = True

    import_gcs_csv_to_cloud_sql(
        schema_type=SchemaType.CASE_TRIAGE,
        destination_table=self.table_name,
        gcs_uri=self.gcs_uri,
        columns=self.columns,
    )
    # The CSV is imported into a tmp__-prefixed table, which is then
    # swapped over the destination table.
    client.import_gcs_csv.assert_called_with(
        instance_name=self.mock_instance_id,
        table_name=f"tmp__{self.table_name}",
        gcs_uri=self.gcs_uri,
        columns=self.columns,
    )

    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        rows = session.query(DashboardUserRestrictions).all()
        self.assertEqual(len(rows), 1)
        self.assertEqual(rows[0].restricted_user_email, self.user_1_email)
def test_readPlaceholderPeople(self) -> None:
    """read_placeholder_persons returns only people with no external ids."""
    placeholder_person = schema.StatePerson(person_id=1,
                                            state_code=_STATE_CODE)
    person_with_id = schema.StatePerson(person_id=2, state_code=_STATE_CODE)
    person_with_id.external_ids = [
        schema.StatePersonExternalId(
            person_external_id_id=1,
            external_id=_EXTERNAL_ID,
            id_type=external_id_types.US_ND_SID,
            state_code=_STATE_CODE,
            person=person_with_id,
        )
    ]

    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        session.add(placeholder_person)
        session.add(person_with_id)
        session.commit()

        # Act
        people = dao.read_placeholder_persons(session, _STATE_CODE)

        # Assert
        self.assertCountEqual(people, [placeholder_person])
def test_convert_single_node_no_primary_key(self):
    """Primary keys are assigned by the DB, not by entity->schema
    conversion."""
    parent = entities.Parent.new_with_defaults(
        full_name="Krusty the Clown", children=[])

    schema_parent = _TestSchemaEntityConverter().convert(parent)
    # Converting entity to schema won't add a primary key if there isn't
    # one already.
    self.assertIsNone(schema_parent.parent_id)

    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        session.add(schema_parent)
        session.commit()

        db_parents = session.query(schema.Parent).all()
        self.assertEqual(len(db_parents), 1)
        db_children = session.query(schema.Child).all()
        self.assertEqual(len(db_children), 0)

        converted_parent = _TestSchemaEntityConverter().convert(
            one(db_parents))
        # ...but there will be a primary key after adding to the DB.
        self.assertIsNotNone(converted_parent.parent_id)
        self.assertEqual(parent.full_name, converted_parent.full_name)
        self.assertEqual(parent.children, converted_parent.children)
def test_readPeopleByRootExternalIds_entireTreeReturnedWithOneMatch(
        self) -> None:
    """A single matching external id returns the whole person tree."""
    # Arrange
    person = schema.StatePerson(person_id=1, state_code=_STATE_CODE)

    def _external_id(id_id, value):
        return schema.StatePersonExternalId(
            person_external_id_id=id_id,
            external_id=value,
            id_type=external_id_types.US_ND_SID,
            state_code=_STATE_CODE,
            person=person,
        )

    person.external_ids = [
        _external_id(1, _EXTERNAL_ID),   # matches the query below
        _external_id(2, _EXTERNAL_ID2),  # does not match
    ]

    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        session.add(person)
        session.commit()

        # Act
        people = dao.read_people_by_cls_external_ids(
            session, _STATE_CODE, schema.StatePerson, [_EXTERNAL_ID])

        # Assert
        self.assertCountEqual(people, [person])
def test_convert_rooted_graph(self):
    """
    Tests converting a graph that has a single root node that is connected
    either directly or indirectly to all entities.
    """
    family = self.SimpsonsFamily()
    # Both parents share the same children, so every entity is reachable
    # from the root.
    family.root.parents = family.parent_entities
    family.homer.children = family.child_entities
    family.marge.children = family.child_entities

    schema_root = _TestSchemaEntityConverter().convert(family.root)

    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        session.add(schema_root)
        session.commit()

        db_root = session.query(schema.Root).all()
        self.assertEqual(len(db_root), 1)
        db_parents = session.query(schema.Parent).all()
        self.assertEqual(len(db_parents), len(family.parent_entities))
        db_children = session.query(schema.Child).all()
        self.assertEqual(len(db_children), len(family.child_entities))

        converted_root = _TestSchemaEntityConverter().convert(one(db_root))
        # After the round trip every parent must see every child, and every
        # child every parent.
        for converted_parent in converted_root.parents:
            self._check_children(converted_parent, family.child_entities)
            for converted_child in converted_parent.children:
                self._check_parents(converted_child, family.parent_entities)
def test_readPeopleByRootExternalIds_SentenceGroupExternalId(self) -> None:
    """Querying by a sentence-group external id returns the owning person."""
    # Arrange
    person = schema.StatePerson(person_id=1, state_code=_STATE_CODE)

    def _sentence_group(group_id, external_id):
        return schema.StateSentenceGroup(
            sentence_group_id=group_id,
            external_id=external_id,
            status=StateSentenceStatus.PRESENT_WITHOUT_INFO.value,
            state_code=_STATE_CODE,
            person=person,
        )

    person.sentence_groups = [
        _sentence_group(1, _EXTERNAL_ID),   # matches the query below
        _sentence_group(2, _EXTERNAL_ID2),  # does not match
    ]

    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        session.add(person)
        session.commit()

        # Act
        people = dao.read_people_by_cls_external_ids(
            session, _STATE_CODE, schema.StateSentenceGroup, [_EXTERNAL_ID])

        # Assert
        self.assertCountEqual(people, [person])
def test_readPeople_byBirthdate(self) -> None:
    """read_people filtered by birthdate returns only matching people."""
    # Arrange
    matching_person = schema.StatePerson(person_id=8,
                                         birthdate=_BIRTHDATE,
                                         state_code=_STATE_CODE)
    other_person = schema.StatePerson(
        state_code=_STATE_CODE,
        person_id=9,
        birthdate=datetime.date(year=2002, month=1, day=2),
    )

    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        session.add(matching_person)
        session.add(other_person)
        session.commit()

        # Act
        people = dao.read_people(session, full_name=None,
                                 birthdate=_BIRTHDATE)

        # Assert
        self.assertCountEqual(people, [matching_person])
def _retrieve_data_for_top_opportunities(state_code: StateCode) -> List[Recipient]:
    """Builds report recipients for officers that have opportunity mismatch
    data, reading officer details from the Case Triage backend."""
    recipients = []
    for officer_email in _top_opps_email_recipient_addresses():
        mismatches = _get_mismatch_data_for_officer(officer_email)
        if mismatches is None:
            # No mismatch data for this officer - skip them entirely.
            continue

        with SessionFactory.using_database(
            SQLAlchemyDatabaseKey.for_schema(SchemaType.CASE_TRIAGE),
            autocommit=False,
        ) as session:
            officer = CaseTriageQuerier.officer_for_email(session, officer_email)
            report_json = {
                utils.KEY_EMAIL_ADDRESS: officer_email,
                utils.KEY_STATE_CODE: state_code.value,
                utils.KEY_DISTRICT: None,
                OFFICER_GIVEN_NAME: officer.given_names,
                "mismatches": mismatches,
            }
            recipients.append(Recipient.from_report_json(report_json))

    return recipients
def test_add_two_people_same_id_type(self) -> None:
    """Distinct external ids that share an id_type are valid - no invariant
    violations are reported."""

    def _person_with_external_id(external_id):
        db_external_id = generate_external_id(
            state_code=self.state_code, external_id=external_id,
            id_type=ID_TYPE_1
        )
        return generate_person(
            state_code=self.state_code, external_ids=[db_external_id]
        )

    # Arrange
    first_person = _person_with_external_id(EXTERNAL_ID_1)
    second_person = _person_with_external_id(EXTERNAL_ID_2)

    with SessionFactory.using_database(
        self.database_key, autocommit=False
    ) as session:
        # Act
        session.add(first_person)
        session.add(second_person)
        session.flush()

        errors = validate_invariants(
            session, self.system_level, self.state_code,
            [first_person, second_person]
        )

        # Assert
        self.assertEqual(0, errors)
def test_add_fine_conflicting_external_id_same_session(self) -> None:
    """Two StateFines sharing one external_id in the same state must fail
    the unique constraint at commit, even when added in one session."""
    # Arrange: first person with a fine using EXTERNAL_ID_1.
    db_person = schema.StatePerson(full_name=self.FULL_NAME,
                                   state_code=self.state_code)
    db_fine = schema.StateFine(
        person=db_person,
        status=StateFineStatus.EXTERNAL_UNKNOWN.value,
        state_code=self.state_code,
        external_id=self.EXTERNAL_ID_1,
        county_code=self.COUNTY_CODE,
    )
    db_sentence_group = schema.StateSentenceGroup(
        status=StateSentenceStatus.EXTERNAL_UNKNOWN.value,
        external_id=self.EXTERNAL_ID_1,
        state_code=self.state_code,
        county_code=self.COUNTY_CODE,
        fines=[db_fine],
    )
    db_external_id = schema.StatePersonExternalId(
        state_code=self.state_code,
        external_id=self.EXTERNAL_ID_1,
        id_type=self.ID_TYPE_1,
    )
    db_person.sentence_groups = [db_sentence_group]
    db_person.external_ids = [db_external_id]

    # Second person whose fine reuses EXTERNAL_ID_1; the person's own
    # external id and sentence group use EXTERNAL_ID_2, so only the fine
    # conflicts.
    db_person_dupe = schema.StatePerson(full_name=self.FULL_NAME,
                                        state_code=self.state_code)
    db_fine_dupe = schema.StateFine(
        person=db_person_dupe,
        status=StateFineStatus.EXTERNAL_UNKNOWN.value,
        state_code=self.state_code,
        external_id=self.EXTERNAL_ID_1,
        county_code=self.COUNTY_CODE,
    )
    db_sentence_group_dupe = schema.StateSentenceGroup(
        status=StateSentenceStatus.EXTERNAL_UNKNOWN.value,
        external_id=self.EXTERNAL_ID_2,
        state_code=self.state_code,
        county_code=self.COUNTY_CODE,
        fines=[db_fine_dupe],
    )
    db_external_id_dupe = schema.StatePersonExternalId(
        state_code=self.state_code,
        external_id=self.EXTERNAL_ID_2,
        id_type=self.ID_TYPE_1,
    )
    db_person_dupe.sentence_groups = [db_sentence_group_dupe]
    db_person_dupe.external_ids = [db_external_id_dupe]

    # Act
    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as session:
        session.add(db_fine)
        session.add(db_fine_dupe)
        session.flush()
        with self.assertRaises(sqlalchemy.exc.IntegrityError):
            session.commit()
def test_fetch_officer_by_hash(self) -> None:
    """officer_for_hashed_email resolves hashed emails to the matching
    officer."""
    officers = [
        generate_fake_officer("id_1", "*****@*****.**"),
        generate_fake_officer("id_2", "*****@*****.**"),
    ]
    with SessionFactory.using_database(self.database_key) as session:
        for officer in officers:
            session.add(officer)

    with SessionFactory.using_database(self.database_key,
                                       autocommit=False) as read_session:
        fetch_by_email_address = CaseTriageQuerier.officer_for_hashed_email(
            read_session, hash_email("*****@*****.**"))
        self.assertEqual(fetch_by_email_address.external_id, "id_1")

        fetch_by_hashed_email = CaseTriageQuerier.officer_for_hashed_email(
            read_session,
            hash_email("*****@*****.**"),
        )
        self.assertEqual(fetch_by_hashed_email.external_id, "id_2")