Example #1
    def testCalculateIncarcerationMetricCombinations_NoIncarceration(self):
        """Tests the CalculateIncarcerationMetricCombinations when there are
        no incarceration_events. This should never happen because any person
        without incarceration events is dropped entirely from the pipeline."""
        fake_person = StatePerson.new_with_defaults(
            person_id=123,
            gender=Gender.MALE,
            birthdate=date(1970, 1, 1),
            residency_status=ResidencyStatus.PERMANENT)

        test_pipeline = TestPipeline()

        inputs = [(self.fake_person_id, {
            'person_incarceration_events': [(fake_person, [])],
            'person_metadata': [self.person_metadata]
        })]

        output = (test_pipeline
                  | beam.Create(inputs)
                  | beam.ParDo(ExtractPersonEventsMetadata())
                  | 'Calculate Incarceration Metrics' >> beam.ParDo(
                      pipeline.CalculateIncarcerationMetricCombinations(),
                      None, -1, ALL_METRICS_INCLUSIONS_DICT))

        assert_that(output, equal_to([]))

        test_pipeline.run()
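
Example #1 leans entirely on Beam's built-in testing utilities: assert_that attaches a verification transform to the pipeline under construction, and the check only fires when test_pipeline.run() executes. A minimal, self-contained sketch of the same empty-output pattern, with an illustrative Filter step in place of the project's DoFns:

import apache_beam as beam
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that, equal_to

def check_empty_output():
    test_pipeline = TestPipeline()
    output = (test_pipeline
              | beam.Create([1, 2, 3])
              | beam.Filter(lambda x: x > 10))  # nothing passes, so output is empty

    # assert_that registers the expectation; it is evaluated on run().
    assert_that(output, equal_to([]))

    test_pipeline.run()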
Example #2
    def testCalculateIncarcerationMetricCombinations(self):
        """Tests the CalculateIncarcerationMetricCombinations DoFn."""
        fake_person = StatePerson.new_with_defaults(
            person_id=123,
            gender=Gender.MALE,
            birthdate=date(1970, 1, 1),
            residency_status=ResidencyStatus.PERMANENT)

        incarceration_events = [
            IncarcerationAdmissionEvent(
                state_code='US_XX',
                event_date=date(2001, 3, 16),
                facility='SAN QUENTIN',
                county_of_residence='county_of_residence',
                admission_reason=StateIncarcerationPeriodAdmissionReason.
                PROBATION_REVOCATION),
            IncarcerationReleaseEvent(
                state_code='US_XX',
                event_date=date(2002, 5, 26),
                facility='SAN QUENTIN',
                county_of_residence='county_of_residence',
                release_reason=StateIncarcerationPeriodReleaseReason.
                SENTENCE_SERVED)
        ]

        # One metric per methodology for each event
        expected_metric_count = 2

        expected_combination_counts = {
            'admissions': expected_metric_count,
            'releases': expected_metric_count
        }

        test_pipeline = TestPipeline()

        inputs = [(self.fake_person_id, {
            'person_events': [(fake_person, incarceration_events)],
            'person_metadata': [self.person_metadata]
        })]

        output = (test_pipeline
                  | beam.Create(inputs)
                  | beam.ParDo(ExtractPersonEventsMetadata())
                  | 'Calculate Incarceration Metrics' >> beam.ParDo(
                      pipeline.CalculateIncarcerationMetricCombinations(),
                      None, -1, ALL_METRICS_INCLUSIONS_DICT))

        assert_that(
            output,
            AssertMatchers.count_combinations(expected_combination_counts),
            'Assert number of metrics is expected value')

        test_pipeline.run()
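
AssertMatchers.count_combinations is project-specific and not shown in this listing. A plausible reconstruction of such a matcher for assert_that is sketched below; the (metric_key, value) element shape and the substring test on the metric key are assumptions about the output format, not the project's actual code:

from apache_beam.testing.util import BeamAssertException

def count_combinations(expected_counts):
    """Hypothetical matcher: tally metrics by type and compare to expected_counts."""
    def _matcher(output_metrics):
        actual_counts = {'admissions': 0, 'releases': 0}
        for metric_key, _value in output_metrics:  # assumed element shape
            if 'ADMISSION' in str(metric_key):
                actual_counts['admissions'] += 1
            elif 'RELEASE' in str(metric_key):
                actual_counts['releases'] += 1
        if actual_counts != expected_counts:
            raise BeamAssertException(
                f'Expected {expected_counts}, got {actual_counts}')
    return _matcher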
Example #3
    def testCalculateIncarcerationMetricCombinations_NoInput(self):
        """Tests the CalculateIncarcerationMetricCombinations when there is
        no input to the function."""

        test_pipeline = TestPipeline()

        output = (test_pipeline
                  | beam.Create([])
                  | 'Calculate Incarceration Metrics' >> beam.ParDo(
                      pipeline.CalculateIncarcerationMetricCombinations(),
                      None, -1, ALL_METRICS_INCLUSIONS_DICT))

        assert_that(output, equal_to([]))

        test_pipeline.run()
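
Every example in this section passes extra positional arguments to beam.ParDo after the DoFn instance (None, -1, ALL_METRICS_INCLUSIONS_DICT); Beam forwards those arguments to the DoFn's process() method alongside each element. A small sketch of that mechanism with an illustrative DoFn:

import apache_beam as beam
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that, equal_to

class MultiplyAndCap(beam.DoFn):
    def process(self, element, factor, cap):
        # 'factor' and 'cap' arrive via the extra arguments given to beam.ParDo.
        yield min(element * factor, cap)

def check_extra_pardo_args():
    test_pipeline = TestPipeline()
    output = (test_pipeline
              | beam.Create([1, 2, 3])
              | beam.ParDo(MultiplyAndCap(), 10, 25))

    assert_that(output, equal_to([10, 20, 25]))

    test_pipeline.run()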
Example #4
    def testCalculateIncarcerationMetricCombinations(self):
        """Tests the CalculateIncarcerationMetricCombinations DoFn."""
        fake_person = StatePerson.new_with_defaults(
            person_id=123,
            gender=Gender.MALE,
            birthdate=date(1970, 1, 1),
            residency_status=ResidencyStatus.PERMANENT)

        incarceration_events = [
            IncarcerationAdmissionEvent(
                state_code='CA',
                event_date=date(2001, 3, 16),
                facility='SAN QUENTIN',
                county_of_residence='county_of_residence',
                admission_reason=StateIncarcerationPeriodAdmissionReason.
                PROBATION_REVOCATION),
            IncarcerationReleaseEvent(
                state_code='CA',
                event_date=date(2002, 5, 26),
                facility='SAN QUENTIN',
                county_of_residence='county_of_residence',
                release_reason=StateIncarcerationPeriodReleaseReason.
                SENTENCE_SERVED)
        ]

        # Get the number of combinations of person-event characteristics.
        num_combinations = len(
            calculator.characteristic_combinations(
                fake_person, incarceration_events[0], ALL_INCLUSIONS_DICT,
                IncarcerationMetricType.POPULATION))
        assert num_combinations > 0

        expected_metric_count = num_combinations * 2

        expected_admission_combination_counts = {
            'admissions': expected_metric_count}

        expected_releases_combination_counts = {
            'releases': expected_metric_count}

        test_pipeline = TestPipeline()

        output = (
            test_pipeline
            | beam.Create([(fake_person, incarceration_events)])
            | 'Calculate Incarceration Metrics' >> beam.ParDo(
                pipeline.CalculateIncarcerationMetricCombinations(), -1,
                ALL_INCLUSIONS_DICT).with_outputs('admissions', 'releases'))

        assert_that(
            output.admissions,
            AssertMatchers.count_combinations(
                expected_admission_combination_counts),
            'Assert number of admission metrics is expected value')

        assert_that(
            output.releases,
            AssertMatchers.count_combinations(
                expected_releases_combination_counts),
            'Assert number of release metrics is expected value')

        test_pipeline.run()
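
Example #4 differs from the others by splitting the DoFn's output into tagged streams with .with_outputs('admissions', 'releases') and asserting on each tag separately. A minimal sketch of the tagged-output pattern, using an illustrative DoFn unrelated to the project:

import apache_beam as beam
from apache_beam import pvalue
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that, equal_to

class SplitEvenOdd(beam.DoFn):
    def process(self, element):
        tag = 'evens' if element % 2 == 0 else 'odds'
        yield pvalue.TaggedOutput(tag, element)

def check_tagged_outputs():
    test_pipeline = TestPipeline()
    split = (test_pipeline
             | beam.Create([1, 2, 3, 4])
             | beam.ParDo(SplitEvenOdd()).with_outputs('evens', 'odds'))

    # Each declared tag is exposed as an attribute on the result.
    assert_that(split.evens, equal_to([2, 4]), 'check evens')
    assert_that(split.odds, equal_to([1, 3]), 'check odds')

    test_pipeline.run()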
Example #5
    def testCalculateIncarcerationMetricCombinations(self):
        """Tests the CalculateIncarcerationMetricCombinations DoFn."""
        fake_person = StatePerson.new_with_defaults(
            state_code="US_XX",
            person_id=123,
            gender=Gender.MALE,
            birthdate=date(1970, 1, 1),
            residency_status=ResidencyStatus.PERMANENT,
        )

        incarceration_events = [
            IncarcerationAdmissionEvent(
                state_code="US_XX",
                event_date=date(2001, 3, 16),
                facility="SAN QUENTIN",
                county_of_residence="county_of_residence",
                admission_reason=StateIncarcerationPeriodAdmissionReason.
                PROBATION_REVOCATION,
            ),
            IncarcerationReleaseEvent(
                state_code="US_XX",
                event_date=date(2002, 5, 26),
                facility="SAN QUENTIN",
                county_of_residence="county_of_residence",
                release_reason=StateIncarcerationPeriodReleaseReason.
                SENTENCE_SERVED,
            ),
        ]

        expected_metric_count = 1

        expected_combination_counts = {
            "admissions": expected_metric_count,
            "releases": expected_metric_count,
        }

        test_pipeline = TestPipeline()

        inputs = [(
            self.fake_person_id,
            {
                "person_events": [(fake_person, incarceration_events)],
                "person_metadata": [self.person_metadata],
            },
        )]

        output = (test_pipeline
                  | beam.Create(inputs)
                  | beam.ParDo(ExtractPersonEventsMetadata())
                  | "Calculate Incarceration Metrics" >> beam.ParDo(
                      pipeline.CalculateIncarcerationMetricCombinations(),
                      None,
                      -1,
                      ALL_METRICS_INCLUSIONS_DICT,
                  ))

        assert_that(
            output,
            AssertMatchers.count_combinations(expected_combination_counts),
            "Assert number of metrics is expected value",
        )

        test_pipeline.run()