Example #1
    def test_many_to_one_no_backref(self):
        family = self.SimpsonsFamily()
        self.assertEqual(len(family.homer.children), 0)
        family.root.parents = [family.homer]
        family.homer.children = [family.bart, family.maggie]
        toy = entities.Toy.new_with_defaults(toy_id=456789, name='Skateboard')
        family.bart.favorite_toy = toy
        family.maggie.favorite_toy = toy

        # Convert the entity graph rooted at family.root into schema objects.
        schema_root = TestSchemaEntityConverter().convert(family.root)

        session = SessionFactory.for_schema_base(TestBase)
        session.add(schema_root)
        session.commit()

        db_roots = session.query(schema.Root).all()
        self.assertEqual(len(db_roots), 1)
        db_parents = session.query(schema.Parent).all()
        self.assertEqual(len(db_parents), 1)
        db_children = session.query(schema.Child).all()
        self.assertEqual(len(db_children), 2)
        db_toys = session.query(schema.Toy).all()
        self.assertEqual(len(db_toys), 1)

        # Converting back from the schema should reproduce the entity graph.
        converted_root = TestSchemaEntityConverter().convert(one(db_roots))
        self.assertEqual(len(converted_root.parents), 1)
        self.assertEqual(len(converted_root.parents[0].children), 2)
        self.assertEqual(converted_root.parents[0].children[0].favorite_toy,
                         toy)
        self.assertEqual(converted_root.parents[0].children[1].favorite_toy,
                         toy)
Example #2
    def test_readPeopleByRootExternalIds_entireTreeReturnedWithOneMatch(self):
        # Arrange
        person = schema.StatePerson(person_id=1)
        external_id_match = schema.StatePersonExternalId(
            person_external_id_id=1,
            external_id=_EXTERNAL_ID,
            id_type=external_id_types.US_ND_SID,
            state_code=_STATE_CODE,
            person=person,
        )
        external_id_no_match = schema.StatePersonExternalId(
            person_external_id_id=2,
            external_id=_EXTERNAL_ID2,  # intentionally different; must not match
            id_type=external_id_types.US_ND_SID,
            state_code=_STATE_CODE,
            person=person,
        )
        person.external_ids = [external_id_match, external_id_no_match]

        session = SessionFactory.for_schema_base(StateBase)
        session.add(person)
        session.commit()

        # Act
        people = dao.read_people_by_cls_external_ids(session, _STATE_CODE,
                                                     schema.StatePerson,
                                                     [_EXTERNAL_ID])

        # Assert
        expected_people = [person]

        self.assertCountEqual(people, expected_people)
Example #3
    def test_readPeopleByExternalId(self):
        # Arrange
        person_no_match = schema.StatePerson(person_id=1)
        person_match_external_id = schema.StatePerson(person_id=2)
        person_external_id = schema.StatePersonExternalId(
            person_external_id_id=1,
            external_id=_EXTERNAL_ID,
            id_type=external_id_types.US_ND_SID,
            state_code=_STATE_CODE,
            person=person_match_external_id,
        )
        person_match_external_id.external_ids = [person_external_id]

        session = SessionFactory.for_schema_base(StateBase)
        session.add(person_no_match)
        session.add(person_match_external_id)
        session.commit()

        ingested_person = entities.StatePerson.new_with_defaults()
        ingested_person.external_ids = \
            [entities.StatePersonExternalId.new_with_defaults(
                external_id=_EXTERNAL_ID,
                id_type=external_id_types.US_ND_SID,
                state_code=_STATE_CODE,
                person=ingested_person,
            )]

        # Act
        people = dao.read_people_by_external_ids(session, _REGION,
                                                 [ingested_person])

        # Assert
        expected_people = [person_match_external_id]

        self.assertCountEqual(people, expected_people)
Example #4
    def test_ingest_view_file_same_args_after_invalidation(self):
        args = GcsfsIngestViewExportArgs(
            ingest_view_name='file_tag',
            upper_bound_datetime_prev=datetime.datetime(
                2015, 1, 2, 2, 2, 2, 2),
            upper_bound_datetime_to_export=datetime.datetime(
                2015, 1, 2, 3, 3, 3, 3))

        ingest_view_unprocessed_path = self._make_unprocessed_path(
            'bucket/file_tag.csv', GcsfsDirectIngestFileType.INGEST_VIEW)
        self.run_ingest_view_file_progression(args, self.metadata_manager,
                                              ingest_view_unprocessed_path)

        # Invalidate the previous row
        session = SessionFactory.for_schema_base(OperationsBase)
        results = session.query(schema.DirectIngestIngestFileMetadata).all()
        result = one(results)
        result.is_invalidated = True
        session.commit()

        # Now we can rerun with the same args
        ingest_view_unprocessed_path = self._make_unprocessed_path(
            'bucket/file_tag.csv',
            GcsfsDirectIngestFileType.INGEST_VIEW,
            dt=datetime.datetime.now())
        self.run_ingest_view_file_progression(args, self.metadata_manager,
                                              ingest_view_unprocessed_path)
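Note: the second run with identical args only succeeds because the prior row was marked is_invalidated; as Example #24 below shows, re-running with the same args against a live row raises an IntegrityError.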
Example #5
    def testWrite_SingleCountWithDateAndAllDemographics(self):
        params = {
            'jid': '01001001',
            'ethnicity': Ethnicity.HISPANIC.value,
            'gender': Gender.FEMALE.value,
            'race': Race.BLACK.value,
            'count': 311,
            'date': '2019-01-01',
        }

        headers = {'X-Appengine-Cron': 'test-cron'}
        response = self.client.get(f'/single_count?{urlencode(params)}',
                                   headers=headers)
        self.assertEqual(response.status_code, 200)

        # Assert
        query = SessionFactory.for_schema_base(JailsBase).query(
            SingleCountAggregate)
        result = one(query.all())

        self.assertEqual(result.count, params['count'])
        self.assertEqual(result.date,
                         str_field_utils.parse_date(params['date']))
        self.assertEqual(result.ethnicity, params['ethnicity'])
        self.assertEqual(result.gender, params['gender'])
        self.assertEqual(result.race, params['race'])
Example #6
    def testWriteDf(self):
        # Arrange
        subject = pd.DataFrame({
            'county_name': ['Alachua', 'Baker', 'Bay', 'Bradford', 'Brevard'],
            'county_population': [257062, 26965, 176016, 27440, 568919],
            'average_daily_population': [799, 478, 1015, 141, 1547],
            'date_reported': [pd.NaT, pd.NaT,
                              datetime.datetime(year=2017, month=9, day=1),
                              pd.NaT, pd.NaT],
            'fips': ['00000', '00001', '00002', '00003', '00004'],
            'report_date': 5 * [DATE_SCRAPED],
            'aggregation_window': 5 * [enum_strings.monthly_granularity],
            'report_frequency': 5 * [enum_strings.monthly_granularity]
        })

        # Act
        dao.write_df(FlCountyAggregate, subject)

        # Assert
        query = SessionFactory.for_schema_base(JailsBase) \
            .query(FlCountyAggregate) \
            .filter(FlCountyAggregate.county_name == 'Bay')
        result = one(query.all())

        self.assertEqual(result.county_name, 'Bay')
        self.assertEqual(result.county_population, 176016)
        self.assertEqual(result.average_daily_population, 1015)
        self.assertEqual(result.date_reported,
                         datetime.date(year=2017, month=9, day=1))
        self.assertEqual(result.fips, '00002')
        self.assertEqual(result.report_date, DATE_SCRAPED)
        self.assertEqual(result.aggregation_window,
                         enum_strings.monthly_granularity)
Example #7
    def mark_raw_file_as_processed(self, path: GcsfsFilePath) -> None:
        self._check_is_raw_file_path(path)
        with SessionFactory.using_database(self.database_key) as session:
            metadata = dao.get_raw_file_metadata_row_for_path(
                session, self.region_code, path)

            metadata.processed_time = datetime.datetime.now(tz=pytz.UTC)
Example #8
    def get_ingest_view_metadata_for_export_job(
        self, ingest_view_job_args: GcsfsIngestViewExportArgs
    ) -> DirectIngestIngestFileMetadata:

        session = SessionFactory.for_schema_base(OperationsBase)

        try:
            metadata = dao.get_ingest_view_metadata_for_export_job(
                session=session,
                region_code=self.region_code,
                file_tag=ingest_view_job_args.ingest_view_name,
                datetimes_contained_lower_bound_exclusive=ingest_view_job_args.upper_bound_datetime_prev,
                datetimes_contained_upper_bound_inclusive=ingest_view_job_args.upper_bound_datetime_to_export,
            )

            if not metadata:
                raise ValueError(
                    f"No metadata found for export job args [{ingest_view_job_args}]"
                )

            metadata_entity = self._ingest_file_schema_metadata_as_entity(metadata)
        except Exception as e:
            session.rollback()
            raise e
        finally:
            session.close()

        return metadata_entity
Example #9
    def register_ingest_view_export_file_name(
        self,
        metadata_entity: DirectIngestIngestFileMetadata,
        exported_path: GcsfsFilePath,
    ) -> None:
        parts = filename_parts_from_path(exported_path)
        if parts.file_type != GcsfsDirectIngestFileType.INGEST_VIEW:
            raise ValueError(f"Exported path has unexpected type {parts.file_type}")

        session = SessionFactory.for_schema_base(OperationsBase)

        try:
            metadata = dao.get_file_metadata_row(
                session, GcsfsDirectIngestFileType.INGEST_VIEW, metadata_entity.file_id
            )

            if metadata.normalized_file_name:
                raise ValueError(
                    f"Normalized file name already set to [{metadata.normalized_file_name}] for file id "
                    f"[{metadata.file_id}]"
                )

            metadata.normalized_file_name = exported_path.file_name
            session.commit()
        except Exception as e:
            session.rollback()
            raise e
        finally:
            session.close()
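Examples #8 and #9 above hand-roll the same session choreography: commit on success (for writes), rollback on error, close in a finally block. As a rough sketch, that boilerplate factors into a context manager; the session_scope helper below is illustrative and not part of this codebase, but the later examples that call SessionFactory.using_database(...) appear to wrap the same pattern:

import contextlib
from typing import Iterator

from sqlalchemy.orm import Session


@contextlib.contextmanager
def session_scope(session: Session) -> Iterator[Session]:
    """Commit on success, roll back on any error, always close.

    Mirrors the try/except/finally blocks in the for_schema_base
    examples above. Read-only callers would skip the commit.
    """
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()


# Hypothetical usage, mirroring register_ingest_view_export_file_name:
# with session_scope(SessionFactory.for_schema_base(OperationsBase)) as session:
#     metadata = dao.get_file_metadata_row(
#         session, GcsfsDirectIngestFileType.INGEST_VIEW, metadata_entity.file_id)
#     metadata.normalized_file_name = exported_path.file_name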
Example #10
    def test_readPeople(self):
        # Arrange
        person = schema.StatePerson(person_id=8,
                                    full_name=_FULL_NAME,
                                    birthdate=_BIRTHDATE)
        person_different_name = schema.StatePerson(person_id=9,
                                                   full_name='diff_name')
        person_different_birthdate = schema.StatePerson(
            person_id=10, birthdate=datetime.date(year=2002, month=1, day=2))
        session = SessionFactory.for_schema_base(StateBase)
        session.add(person)
        session.add(person_different_name)
        session.add(person_different_birthdate)
        session.commit()

        # Act
        people = dao.read_people(session, full_name=None, birthdate=None)

        # Assert
        expected_people = [
            converter.convert_schema_object_to_entity(person),
            converter.convert_schema_object_to_entity(person_different_name),
            converter.convert_schema_object_to_entity(
                person_different_birthdate)
        ]

        self.assertCountEqual(people, expected_people)
Example #11
    def mark_file_as_discovered(self, path: GcsfsFilePath) -> None:
        if not path.file_name.startswith(DIRECT_INGEST_UNPROCESSED_PREFIX):
            raise ValueError("Expect only unprocessed paths in this function.")

        parts = filename_parts_from_path(path)
        session = SessionFactory.for_schema_base(OperationsBase)

        try:
            if parts.file_type == GcsfsDirectIngestFileType.INGEST_VIEW:
                metadata = dao.get_file_metadata_row_for_path(
                    session, self.region_code, path
                )
                dt = datetime.datetime.utcnow()
                if not metadata.export_time:
                    metadata.export_time = dt
                metadata.discovery_time = dt
            elif parts.file_type == GcsfsDirectIngestFileType.RAW_DATA:
                session.add(
                    schema.DirectIngestRawFileMetadata(
                        region_code=self.region_code,
                        file_tag=parts.file_tag,
                        normalized_file_name=path.file_name,
                        discovery_time=datetime.datetime.utcnow(),
                        processed_time=None,
                        datetimes_contained_upper_bound_inclusive=parts.utc_upload_datetime,
                    )
                )
            else:
                raise ValueError(f"Unexpected path type: {parts.file_type}")
            session.commit()
        except Exception as e:
            session.rollback()
            raise e
        finally:
            session.close()
Example #12
    def test_readPeopleByRootExternalIds(self):
        # Arrange
        person_no_match = schema.StatePerson(person_id=1)
        person_match_external_id = schema.StatePerson(person_id=2)
        person_external_id = schema.StatePersonExternalId(
            person_external_id_id=1,
            external_id=_EXTERNAL_ID,
            id_type=external_id_types.US_ND_SID,
            state_code=_STATE_CODE,
            person=person_match_external_id,
        )
        person_match_external_id.external_ids = [person_external_id]

        session = SessionFactory.for_schema_base(StateBase)
        session.add(person_no_match)
        session.add(person_match_external_id)
        session.commit()

        # Act
        people = dao.read_people_by_cls_external_ids(session, _STATE_CODE,
                                                     entities.StatePerson,
                                                     [_EXTERNAL_ID])

        # Assert
        expected_people = [
            converter.convert_schema_object_to_entity(person_match_external_id)
        ]

        self.assertCountEqual(people, expected_people)
Example #13
    def test_readPlaceholderPeople(self):
        placeholder_person = schema.StatePerson(person_id=1,
                                                state_code=_STATE_CODE)
        person = schema.StatePerson(person_id=2, state_code=_STATE_CODE)
        person_external_id = schema.StatePersonExternalId(
            person_external_id_id=1,
            external_id=_EXTERNAL_ID,
            id_type=external_id_types.US_ND_SID,
            state_code=_STATE_CODE,
            person=person,
        )
        person.external_ids = [person_external_id]

        session = SessionFactory.for_schema_base(StateBase)
        session.add(placeholder_person)
        session.add(person)
        session.commit()

        # Act
        people = dao.read_placeholder_persons(session)

        # Assert
        expected_people = [placeholder_person]

        self.assertCountEqual(people, expected_people)
Example #14
    def setUp(self) -> None:
        fakes.use_in_memory_sqlite_database(TestBase)

        session = SessionFactory.for_schema_base(TestBase)
        self.assertEqual(len(session.query(schema.Root).all()), 0)
        self.assertEqual(len(session.query(schema.Parent).all()), 0)
        self.assertEqual(len(session.query(schema.Child).all()), 0)
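The matching tearDown is not shown in this listing; a minimal sketch, assuming the fakes module exposes a teardown counterpart to use_in_memory_sqlite_database (the helper name below is an assumption, not confirmed by these examples):

    def tearDown(self) -> None:
        # Assumed counterpart to fakes.use_in_memory_sqlite_database;
        # the real helper name in the codebase may differ.
        fakes.teardown_in_memory_sqlite_databases()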
Example #15
    def test_readPeople_byBirthdate(self) -> None:
        # Arrange
        person = schema.StatePerson(person_id=8,
                                    birthdate=_BIRTHDATE,
                                    state_code=_STATE_CODE)
        person_different_birthdate = schema.StatePerson(
            state_code=_STATE_CODE,
            person_id=9,
            birthdate=datetime.date(year=2002, month=1, day=2),
        )

        with SessionFactory.using_database(self.database_key,
                                           autocommit=False) as session:
            session.add(person)
            session.add(person_different_birthdate)
            session.commit()

            # Act
            people = dao.read_people(session,
                                     full_name=None,
                                     birthdate=_BIRTHDATE)

            # Assert
            expected_people = [person]
            self.assertCountEqual(people, expected_people)
Example #16
    def get_metadata_for_raw_files_discovered_after_datetime(
        self,
        raw_file_tag: str,
        discovery_time_lower_bound_exclusive: Optional[datetime.datetime],
    ) -> List[DirectIngestRawFileMetadata]:
        session = SessionFactory.for_schema_base(OperationsBase)

        try:
            results = dao.get_metadata_for_raw_files_discovered_after_datetime(
                session=session,
                region_code=self.region_code,
                raw_file_tag=raw_file_tag,
                discovery_time_lower_bound_exclusive=discovery_time_lower_bound_exclusive,
            )

            metadata_entities = [
                self._raw_file_schema_metadata_as_entity(metadata)
                for metadata in results
            ]
        except Exception as e:
            session.rollback()
            raise e
        finally:
            session.close()

        return metadata_entities
Example #17
    def testWriteDf_rowsWithSameColumnsThatMustBeUnique_onlyWritesOnce(self):
        # Arrange
        shared_fips = "12345"
        subject = pd.DataFrame({
            "county_name": ["Alachua", "Baker"],
            "county_population": [257062, 26965],
            "average_daily_population": [799, 478],
            "date_reported": [pd.NaT, pd.NaT],
            "fips":
            2 * [shared_fips],
            "report_date":
            2 * [DATE_SCRAPED],
            "aggregation_window":
            2 * [enum_strings.monthly_granularity],
            "report_frequency":
            2 * [enum_strings.monthly_granularity],
        })

        # Act
        dao.write_df(FlCountyAggregate, subject)

        # Assert
        with SessionFactory.using_database(self.database_key,
                                           autocommit=False) as session:
            query = session.query(FlCountyAggregate)
            self.assertEqual(len(query.all()), 1)
Example #18
    def register_ingest_file_export_job(
        self, ingest_view_job_args: GcsfsIngestViewExportArgs
    ) -> DirectIngestIngestFileMetadata:
        session = SessionFactory.for_schema_base(OperationsBase)

        try:
            metadata = schema.DirectIngestIngestFileMetadata(
                region_code=self.region_code,
                file_tag=ingest_view_job_args.ingest_view_name,
                is_invalidated=False,
                is_file_split=False,
                job_creation_time=datetime.datetime.utcnow(),
                datetimes_contained_lower_bound_exclusive=ingest_view_job_args.upper_bound_datetime_prev,
                datetimes_contained_upper_bound_inclusive=ingest_view_job_args.upper_bound_datetime_to_export,
            )
            session.add(metadata)
            session.commit()
            metadata_entity = self._ingest_file_schema_metadata_as_entity(metadata)
        except Exception as e:
            session.rollback()
            raise e
        finally:
            session.close()

        return metadata_entity
Example #19
def _retrieve_data_for_top_opportunities(state_code: StateCode) -> List[Recipient]:
    """Fetches list of recipients from the Case Triage backend where we store information
    about which opportunities are active via the OpportunityPresenter."""
    recipients = []
    for officer_email in _top_opps_email_recipient_addresses():
        mismatches = _get_mismatch_data_for_officer(officer_email)
        if mismatches is not None:
            with SessionFactory.using_database(
                SQLAlchemyDatabaseKey.for_schema(SchemaType.CASE_TRIAGE),
                autocommit=False,
            ) as session:
                officer = CaseTriageQuerier.officer_for_email(session, officer_email)
                recipients.append(
                    Recipient.from_report_json(
                        {
                            utils.KEY_EMAIL_ADDRESS: officer_email,
                            utils.KEY_STATE_CODE: state_code.value,
                            utils.KEY_DISTRICT: None,
                            OFFICER_GIVEN_NAME: officer.given_names,
                            "mismatches": mismatches,
                        }
                    )
                )

    return recipients
Example #20
    def register_ingest_file_split(
        self,
        original_file_metadata: DirectIngestIngestFileMetadata,
        path: GcsfsFilePath,
    ) -> DirectIngestIngestFileMetadata:
        session = SessionFactory.for_schema_base(OperationsBase)

        try:
            metadata = schema.DirectIngestIngestFileMetadata(
                region_code=self.region_code,
                file_tag=original_file_metadata.file_tag,
                is_invalidated=False,
                is_file_split=True,
                job_creation_time=datetime.datetime.utcnow(),
                normalized_file_name=path.file_name,
                datetimes_contained_lower_bound_exclusive=original_file_metadata.datetimes_contained_lower_bound_exclusive,
                datetimes_contained_upper_bound_inclusive=original_file_metadata.datetimes_contained_upper_bound_inclusive,
            )
            session.add(metadata)
            session.commit()
            metadata_entity = self._ingest_file_schema_metadata_as_entity(metadata)
        except Exception as e:
            session.rollback()
            raise e
        finally:
            session.close()

        return metadata_entity
Example #21
    def register_ingest_file_split(
        self,
        original_file_metadata: DirectIngestIngestFileMetadata,
        path: GcsfsFilePath,
    ) -> DirectIngestIngestFileMetadata:
        self._check_is_ingest_view_file_path(path)

        with SessionFactory.using_database(self.database_key) as session:
            metadata = schema.DirectIngestIngestFileMetadata(
                region_code=self.region_code,
                file_tag=original_file_metadata.file_tag,
                is_invalidated=False,
                is_file_split=True,
                job_creation_time=datetime.datetime.now(tz=pytz.UTC),
                normalized_file_name=path.file_name,
                datetimes_contained_lower_bound_exclusive=original_file_metadata.datetimes_contained_lower_bound_exclusive,
                datetimes_contained_upper_bound_inclusive=original_file_metadata.datetimes_contained_upper_bound_inclusive,
                ingest_database_name=original_file_metadata.ingest_database_name,
            )
            session.add(metadata)
            session.commit()
            return self._ingest_file_schema_metadata_as_entity(metadata)
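Examples #20 and #21 are two revisions of the same method: #20 manages the session lifecycle by hand around for_schema_base, while #21 validates the path up front, delegates error handling and cleanup to the SessionFactory.using_database context manager, switches from naive datetime.datetime.utcnow() to timezone-aware datetime.datetime.now(tz=pytz.UTC), and carries through the ingest_database_name field.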
Example #22
    def testWriteDf_rowsWithSameColumnsThatMustBeUnique_onlyWritesOnce(self):
        # Arrange
        shared_fips = '12345'
        subject = pd.DataFrame({
            'county_name': ['Alachua', 'Baker'],
            'county_population': [257062, 26965],
            'average_daily_population': [799, 478],
            'date_reported': [pd.NaT, pd.NaT],
            'fips': 2 * [shared_fips],
            'report_date': 2 * [DATE_SCRAPED],
            'aggregation_window': 2 * [enum_strings.monthly_granularity],
            'report_frequency': 2 * [enum_strings.monthly_granularity]
        })

        # Act
        dao.write_df(FlCountyAggregate, subject)

        # Assert
        query = \
            SessionFactory.for_schema_base(JailsBase).query(FlCountyAggregate)
        self.assertEqual(len(query.all()), 1)
Example #23
def _fetch_po_user_feedback() -> Tuple[str, HTTPStatus]:
    with SessionFactory.using_database(
        SQLAlchemyDatabaseKey.for_schema(SchemaType.CASE_TRIAGE), autocommit=False
    ) as session:
        results = (
            session.query(CaseUpdate)
            .filter(
                CaseUpdate.comment.isnot(None),
                CaseUpdate.officer_external_id.notlike("demo::%"),
            )
            .all()
        )
        return (
            jsonify(
                [
                    {
                        "personExternalId": res.person_external_id,
                        "officerExternalId": res.officer_external_id,
                        "actionType": res.action_type,
                        "comment": res.comment,
                        "timestamp": str(res.action_ts),
                    }
                    for res in results
                ]
            ),
            HTTPStatus.OK,
        )
Example #24
    def test_ingest_view_file_progression_same_args_twice_throws(self) -> None:
        args = GcsfsIngestViewExportArgs(
            ingest_view_name="file_tag",
            upper_bound_datetime_prev=datetime.datetime(
                2015, 1, 2, 2, 2, 2, 2),
            upper_bound_datetime_to_export=datetime.datetime(
                2015, 1, 2, 3, 3, 3, 3),
        )

        ingest_view_unprocessed_path = self._make_unprocessed_path(
            "bucket/file_tag.csv", GcsfsDirectIngestFileType.INGEST_VIEW)
        self.run_ingest_view_file_progression(args, self.metadata_manager,
                                              ingest_view_unprocessed_path)

        with self.assertRaises(IntegrityError):
            ingest_view_unprocessed_path = self._make_unprocessed_path(
                "bucket/file_tag.csv",
                GcsfsDirectIngestFileType.INGEST_VIEW,
                dt=datetime.datetime.now(),
            )
            self.run_ingest_view_file_progression(
                args, self.metadata_manager, ingest_view_unprocessed_path)

        session = SessionFactory.for_schema_base(OperationsBase)
        results = session.query(schema.DirectIngestIngestFileMetadata).all()
        self.assertEqual(1, len(results))
Example #25
    def test_import_gcs_csv_to_cloud_sql_swaps_tables(self) -> None:
        """Assert that the temp table and destination are successfully swapped."""
        self.mock_cloud_sql_client.import_gcs_csv.side_effect = (
            self._mock_load_data_from_csv)
        self.mock_cloud_sql_client.wait_until_operation_completed.return_value = True

        import_gcs_csv_to_cloud_sql(
            schema_type=SchemaType.CASE_TRIAGE,
            destination_table=self.table_name,
            gcs_uri=self.gcs_uri,
            columns=self.columns,
        )
        self.mock_cloud_sql_client.import_gcs_csv.assert_called_with(
            instance_name=self.mock_instance_id,
            table_name=f"tmp__{self.table_name}",
            gcs_uri=self.gcs_uri,
            columns=self.columns,
        )
        with SessionFactory.using_database(self.database_key,
                                           autocommit=False) as session:
            destination_table_rows = session.query(
                DashboardUserRestrictions).all()

        self.assertEqual(len(destination_table_rows), 1)
        self.assertEqual(destination_table_rows[0].restricted_user_email,
                         self.user_1_email)
Example #26
    def test_readPlaceholderPeople(self) -> None:
        placeholder_person = schema.StatePerson(person_id=1,
                                                state_code=_STATE_CODE)
        person = schema.StatePerson(person_id=2, state_code=_STATE_CODE)
        person_external_id = schema.StatePersonExternalId(
            person_external_id_id=1,
            external_id=_EXTERNAL_ID,
            id_type=external_id_types.US_ND_SID,
            state_code=_STATE_CODE,
            person=person,
        )
        person.external_ids = [person_external_id]

        with SessionFactory.using_database(self.database_key,
                                           autocommit=False) as session:
            session.add(placeholder_person)
            session.add(person)
            session.commit()

            # Act
            people = dao.read_placeholder_persons(session, _STATE_CODE)

            # Assert
            expected_people = [placeholder_person]

            self.assertCountEqual(people, expected_people)
Example #27
    def test_readPeopleByRootExternalIds_SentenceGroupExternalId(self):
        # Arrange
        person = schema.StatePerson(person_id=1)
        sentence_group = schema.StateSentenceGroup(
            sentence_group_id=1,
            external_id=_EXTERNAL_ID,
            status=StateSentenceStatus.PRESENT_WITHOUT_INFO.value,
            state_code=_STATE_CODE,
            person=person)
        sentence_group_2 = schema.StateSentenceGroup(
            sentence_group_id=2,
            external_id=_EXTERNAL_ID2,
            status=StateSentenceStatus.PRESENT_WITHOUT_INFO.value,
            state_code=_STATE_CODE,
            person=person)
        person.sentence_groups = [sentence_group, sentence_group_2]

        session = SessionFactory.for_schema_base(StateBase)
        session.add(person)
        session.commit()

        # Act
        people = dao.read_people_by_cls_external_ids(session, _STATE_CODE,
                                                     schema.StateSentenceGroup,
                                                     [_EXTERNAL_ID])

        # Assert
        expected_people = [person]

        self.assertCountEqual(people, expected_people)
Example #28
    def test_readPeopleByRootExternalIds(self) -> None:
        # Arrange
        person_no_match = schema.StatePerson(person_id=1,
                                             state_code=_STATE_CODE)
        person_match_external_id = schema.StatePerson(person_id=2,
                                                      state_code=_STATE_CODE)
        person_external_id = schema.StatePersonExternalId(
            person_external_id_id=1,
            external_id=_EXTERNAL_ID,
            id_type=external_id_types.US_ND_SID,
            state_code=_STATE_CODE,
            person=person_match_external_id,
        )
        person_match_external_id.external_ids = [person_external_id]

        with SessionFactory.using_database(self.database_key,
                                           autocommit=False) as session:
            session.add(person_no_match)
            session.add(person_match_external_id)
            session.commit()

            # Act
            people = dao.read_people_by_cls_external_ids(
                session, _STATE_CODE, schema.StatePerson, [_EXTERNAL_ID])

            # Assert
            expected_people = [person_match_external_id]

            self.assertCountEqual(people, expected_people)
Example #29
    def test_add_behavior_2(self):
        fakes.use_in_memory_sqlite_database(TestBase)

        parent = entities.Parent.new_with_defaults(
            full_name='Krusty the Clown',
        )
        converter = TestSchemaEntityConverter()
        schema_parent = converter.convert(parent)

        session = SessionFactory.for_schema_base(TestBase)
        session.add(schema_parent)
        session.commit()

        parents = session.query(schema.Parent).all()
        self.assertEqual(len(parents), 1)

        root = entities.Root.new_with_defaults(
            type=RootType.SIMPSONS,
        )

        # Attaching the already-persisted parent to a new root must not
        # create a duplicate Parent row.
        db_root = converter.convert(root)
        db_root.parents.append(parents[0])
        session.add(db_root)
        session.commit()

        roots = session.query(schema.Root).all()
        self.assertEqual(len(roots), 1)

        parents = session.query(schema.Parent).all()
        self.assertEqual(len(parents), 1)
Example #30
    def test_convert_rooted_graph(self):
        """
        Tests converting a graph that has a single root node that is connected
        either directly or indirectly to all entities.
        """
        family = self.SimpsonsFamily()

        family.root.parents = family.parent_entities
        family.homer.children = family.child_entities
        family.marge.children = family.child_entities

        schema_root = TestSchemaEntityConverter().convert(family.root)

        session = SessionFactory.for_schema_base(TestBase)
        session.add(schema_root)
        session.commit()

        db_root = session.query(schema.Root).all()
        self.assertEqual(len(db_root), 1)

        db_parents = session.query(schema.Parent).all()
        self.assertEqual(len(db_parents), len(family.parent_entities))

        db_children = session.query(schema.Child).all()
        self.assertEqual(len(db_children), len(family.child_entities))

        converted_root = TestSchemaEntityConverter().convert(one(db_root))

        for converted_parent in converted_root.parents:
            self._check_children(converted_parent, family.child_entities)
            for converted_child in converted_parent.children:
                self._check_parents(converted_child, family.parent_entities)