Example #1
    def test_filter_recipients(self) -> None:
        dev_from_idaho = Recipient.from_report_json(
            {
                "email_address": "*****@*****.**",
                "state_code": "US_ID",
                "district": REGION_CODES["US_ID_D3"],
            }
        )
        dev_from_iowa = Recipient.from_report_json(
            {"email_address": "*****@*****.**", "state_code": "US_IA", "district": None}
        )
        recipients = [dev_from_idaho, dev_from_iowa]

        self.assertEqual(filter_recipients(recipients), recipients)
        self.assertEqual(
            filter_recipients(recipients, region_code="US_ID_D3"), [dev_from_idaho]
        )
        self.assertEqual(
            filter_recipients(
                recipients, region_code="US_ID_D3", email_allowlist=["*****@*****.**"]
            ),
            [],
        )
        self.assertEqual(
            filter_recipients(recipients, email_allowlist=["*****@*****.**"]),
            [dev_from_iowa],
        )
        self.assertEqual(
            filter_recipients(recipients, email_allowlist=["*****@*****.**"]), []
        )

        with self.assertRaises(InvalidRegionCodeException):
            filter_recipients(recipients, region_code="gibberish")
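
The test above only exercises the public behavior of filter_recipients. As a rough reference, a minimal implementation consistent with these assertions might look like the sketch below; REGION_CODES, InvalidRegionCodeException, and the Recipient attribute names are assumptions taken from the test itself, not from the library's actual source.

# Hypothetical sketch only; assumes Recipient exposes `district` and
# `email_address`, and that REGION_CODES maps region codes to district values.
from typing import List, Optional

def filter_recipients(
    recipients: List[Recipient],
    region_code: Optional[str] = None,
    email_allowlist: Optional[List[str]] = None,
) -> List[Recipient]:
    if region_code is not None:
        if region_code not in REGION_CODES:
            raise InvalidRegionCodeException()
        # Keep only recipients whose district matches the requested region.
        recipients = [
            r for r in recipients if r.district == REGION_CODES[region_code]
        ]
    if email_allowlist is not None:
        # Keep only recipients whose email address is explicitly allowlisted.
        recipients = [r for r in recipients if r.email_address in email_allowlist]
    return recipients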
Example #2
def _retrieve_data_for_top_opportunities(state_code: StateCode) -> List[Recipient]:
    """Fetches list of recipients from the Case Triage backend where we store information
    about which opportunities are active via the OpportunityPresenter."""
    recipients = []
    for officer_email in _top_opps_email_recipient_addresses():
        mismatches = _get_mismatch_data_for_officer(officer_email)
        if mismatches is not None:
            with SessionFactory.using_database(
                SQLAlchemyDatabaseKey.for_schema(SchemaType.CASE_TRIAGE),
                autocommit=False,
            ) as session:
                officer = CaseTriageQuerier.officer_for_email(session, officer_email)
                recipients.append(
                    Recipient.from_report_json(
                        {
                            utils.KEY_EMAIL_ADDRESS: officer_email,
                            utils.KEY_STATE_CODE: state_code.value,
                            utils.KEY_DISTRICT: None,
                            OFFICER_GIVEN_NAME: officer.given_names,
                            "mismatches": mismatches,
                        }
                    )
                )

    return recipients
Example #3
    def setUp(self) -> None:
        with open(os.path.join(os.path.dirname(__file__),
                               FIXTURE_FILE)) as fixture_file:
            self.recipient = Recipient.from_report_json(
                json.loads(fixture_file.read()))
            self.recipient.data["batch_id"] = "20201105123033"

        project_id = "RECIDIVIZ_TEST"
        cdn_static_ip = "123.456.7.8"
        test_secrets = {"po_report_cdn_static_IP": cdn_static_ip}

        self.get_secret_patcher = patch("recidiviz.utils.secrets.get_secret")
        self.project_id_patcher = patch("recidiviz.utils.metadata.project_id")

        self.get_secret_patcher.start().side_effect = test_secrets.get
        self.project_id_patcher.start().return_value = project_id
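
The setUp above starts mock patchers, but the excerpt does not show the matching cleanup. A typical counterpart would stop each patcher; the later examples that also patch GcsfsFactory.build would additionally stop that patcher.

    def tearDown(self) -> None:
        # Stop the patchers started in setUp so the mocks do not leak between tests.
        self.get_secret_patcher.stop()
        self.project_id_patcher.stop()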
Example #4
    def test_generate_incomplete_data(self) -> None:
        """Test that no files are added to Google Cloud Storage and a KeyError is raised
        if the recipient data is missing a key needed for the HTML template."""

        with self.assertRaises(KeyError):
            recipient = Recipient.from_report_json(
                {
                    "email_address": "*****@*****.**",
                    "state_code": "US_ID",
                    "district": "DISTRICT OFFICE 3",
                }
            )

            report_context = self.report_context_type(self.state_code, recipient)
            generate(report_context)

        self.assertEqual(self.gcs_file_system.all_paths, [])
Example #5
    def setUp(self) -> None:
        self.project_id_patcher = patch("recidiviz.utils.metadata.project_id")
        self.get_secret_patcher = patch("recidiviz.utils.secrets.get_secret")
        self.gcs_file_system_patcher = patch(
            "recidiviz.reporting.email_generation.GcsfsFactory.build"
        )
        test_secrets = {"po_report_cdn_static_IP": "123.456.7.8"}
        self.get_secret_patcher.start().side_effect = test_secrets.get
        self.project_id_patcher.start().return_value = "recidiviz-test"
        self.gcs_file_system = FakeGCSFileSystem()
        self.mock_gcs_file_system = self.gcs_file_system_patcher.start()
        self.mock_gcs_file_system.return_value = self.gcs_file_system

        with open(self.fixture_file_path()) as fixture_file:
            self.recipient = Recipient.from_report_json(json.loads(fixture_file.read()))

        self.state_code = StateCode.US_ID
        self.mock_batch_id = "1"
        self.recipient.data["batch_id"] = self.mock_batch_id
        self.report_context = self.report_context_type(self.state_code, self.recipient)
Example #6
    def setUp(self) -> None:
        self.project_id_patcher = patch("recidiviz.utils.metadata.project_id")
        self.get_secret_patcher = patch("recidiviz.utils.secrets.get_secret")
        self.gcs_file_system_patcher = patch(
            "recidiviz.reporting.email_generation.GcsfsFactory.build"
        )
        test_secrets = {"po_report_cdn_static_IP": "123.456.7.8"}
        self.get_secret_patcher.start().side_effect = test_secrets.get
        self.project_id_patcher.start().return_value = "recidiviz-test"
        self.gcs_file_system = FakeGCSFileSystem()
        self.mock_gcs_file_system = self.gcs_file_system_patcher.start()
        self.mock_gcs_file_system.return_value = self.gcs_file_system

        with open(
            os.path.join(
                f"{os.path.dirname(__file__)}/context/po_monthly_report", FIXTURE_FILE
            )
        ) as fixture_file:
            self.recipient = Recipient.from_report_json(json.loads(fixture_file.read()))

        self.state_code = "US_ID"
        self.mock_batch_id = "1"
        self.recipient.data["batch_id"] = self.mock_batch_id
        self.report_context = PoMonthlyReportContext(self.state_code, self.recipient)
Example #7
 Recipient.from_report_json({
     utils.KEY_EMAIL_ADDRESS: "*****@*****.**",
     utils.KEY_STATE_CODE: "US_ID",
     utils.KEY_DISTRICT: "US_ID_D3",
     OFFICER_GIVEN_NAME: "Alex",
     "assessment_name": "LSI-R",
     "mismatches": [
         {
             "name": "Tonye Thompson",
             "person_external_id": "189472",
             "last_score": 14,
             "last_assessment_date": "10/12/20",
             "current_supervision_level": "Medium",
             "recommended_level": "Low",
         },
         {
             "name": "Linet Hansen",
             "person_external_id": "47228",
             "last_assessment_date": "1/12/21",
             "last_score": 8,
             "current_supervision_level": "Medium",
             "recommended_level": "Low",
         },
         {
             "name": "Rebekah Cortes",
             "person_external_id": "132878",
             "last_assessment_date": "3/14/20",
             "last_score": 10,
             "current_supervision_level": "High",
             "recommended_level": "Medium",
         },
         {
             "name": "Taryn Berry",
             "person_external_id": "147872",
             "last_assessment_date": "3/13/20",
             "last_score": 4,
             "current_supervision_level": "High",
             "recommended_level": "Low",
         },
     ],
 }),
Example #8
 Recipient.from_report_json({
     utils.KEY_EMAIL_ADDRESS: "*****@*****.**",
     utils.KEY_STATE_CODE: "US_ID",
     utils.KEY_DISTRICT: "US_ID_D3",
     "pos_discharges": 0,
     "earned_discharges": 0,
     "supervision_downgrades": 0,
     "technical_revocations": 0,
     "crime_revocations": 0,
     "absconsions": 0,
     "pos_discharges_district_average": 0,
     "pos_discharges_state_average": 0,
     "earned_discharges_district_average": 0,
     "earned_discharges_state_average": 0,
     "supervision_downgrades_district_average": 0,
     "supervision_downgrades_state_average": 0,
     "technical_revocations_district_average": 0,
     "technical_revocations_state_average": 0,
     "crime_revocations_district_average": 0,
     "crime_revocations_state_average": 0,
     "absconsions_district_average": 0,
     "absconsions_state_average": 0,
     "pos_discharges_last_month": 0,
     "earned_discharges_last_month": 0,
     "supervision_downgrades_last_month": 0,
     "technical_revocations_last_month": 0,
     "crime_revocations_last_month": 0,
     "absconsions_last_month": 0,
     "pos_discharges_clients": 0,
     "earned_discharges_clients": 0,
     "supervision_downgrades_clients": 0,
     "absconsions_clients": 0,
     "assessments_out_of_date_clients": 0,
     "facetoface_out_of_date_clients": 0,
     "revocations_clients": 0,
     "assessments": 0,
     "assessments_percent": 0,
     "facetoface": 0,
     "facetoface_percent": 0,
     "officer_external_id": 0,
     "officer_given_name": "Clementine",
     "review_month": 4,
     "mismatches": [
         {
             "name": "Tonye Thompson",
             "person_external_id": "189472",
             "last_score": 14,
             "last_assessment_date": "10/12/20",
             "current_supervision_level": "Medium",
             "recommended_level": "Low",
         },
         {
             "name": "Linet Hansen",
             "person_external_id": "47228",
             "last_assessment_date": "1/12/21",
             "last_score": 8,
             "current_supervision_level": "Medium",
             "recommended_level": "Low",
         },
         {
             "name": "Rebekah Cortes",
             "person_external_id": "132878",
             "last_assessment_date": "3/14/20",
             "last_score": 10,
             "current_supervision_level": "High",
             "recommended_level": "Medium",
         },
         {
             "name": "Taryn Berry",
             "person_external_id": "147872",
             "last_assessment_date": "3/13/20",
             "last_score": 4,
             "current_supervision_level": "High",
             "recommended_level": "Low",
         },
     ],
 }),
Example #9
 def setUp(self) -> None:
     with open(
         os.path.join(os.path.dirname(__file__), FIXTURE_FILE)
     ) as fixture_file:
         self.recipient = Recipient.from_report_json(json.loads(fixture_file.read()))
         self.recipient.data["batch_id"] = "20201105123033"
Example #10
def retrieve_data(state_code: str, report_type: str, batch_id: str) -> List[Recipient]:
    """Retrieves the data for email generation of the given report type for the given state.

    Get the data from Cloud Storage and return it in a list of dictionaries. Saves the data file into an archive
    bucket on completion, so that we have the ability to troubleshoot or re-generate a previous batch of emails
    later on.

    Args:
        state_code: State identifier used to retrieve appropriate data
        report_type: The type of report, used to determine the data file name
        batch_id: The identifier for this batch

    Returns:
        A list of recipient data dictionaries

    Raises:
        Non-recoverable errors that should stop execution. Attempts to catch and handle errors that are recoverable.
        Provides logging for debug purposes whenever possible.
    """
    data_bucket = utils.get_data_storage_bucket_name()
    data_filename = ""
    gcs_file_system = GcsfsFactory.build()
    try:
        data_filename = utils.get_data_filename(state_code, report_type)
        path = GcsfsFilePath.from_absolute_path(f"gs://{data_bucket}/{data_filename}")
        file_contents = gcs_file_system.download_as_string(path)
    except BaseException:
        logging.info("Unable to load data file %s/%s", data_bucket, data_filename)
        raise

    archive_bucket = utils.get_data_archive_bucket_name()
    archive_filename = ""
    try:
        archive_filename = utils.get_data_archive_filename(batch_id)
        archive_path = GcsfsFilePath.from_absolute_path(
            f"gs://{archive_bucket}/{archive_filename}"
        )
        gcs_file_system.upload_from_string(
            path=archive_path, contents=file_contents, content_type="text/json"
        )
    except Exception:
        logging.error(
            "Unable to archive the data file to %s/%s", archive_bucket, archive_filename
        )
        raise

    json_list = file_contents.splitlines()

    recipient_data: List[dict] = []
    for json_str in json_list:
        try:
            item = json.loads(json_str)
        except Exception as err:
            logging.error(
                "Unable to parse JSON found in the file %s. Offending json string is: '%s'. <%s> %s",
                data_filename,
                json_str,
                type(err).__name__,
                err,
            )
        else:
            recipient_data.append(item)

    logging.info(
        "Retrieved %s recipients from data file %s", len(recipient_data), data_filename
    )
    return [
        Recipient.from_report_json(
            {
                **recipient,
                utils.KEY_BATCH_ID: batch_id,
            }
        )
        for recipient in recipient_data
    ]
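
For orientation, a hypothetical call site might look like the following; the report_type string and the batch id are illustrative values borrowed from elsewhere in these examples, not from the original caller.

# Hypothetical usage; "po_monthly_report" and the batch id are assumed values.
recipients = retrieve_data(
    state_code="US_ID", report_type="po_monthly_report", batch_id="20201105123033"
)
logging.info("Loaded %d recipients", len(recipients))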
Example #11
                err,
            )
        else:
            if email := item.get("email_address"):
                mismatches = _get_mismatch_data_for_officer(email)
                if mismatches is not None:
                    item["mismatches"] = mismatches
            recipient_data.append(item)

    logging.info(
        "Retrieved %s recipients from data file %s", len(recipient_data), data_filename
    )
    return [
        Recipient.from_report_json(
            {
                **recipient,
                utils.KEY_BATCH_ID: batch_id,
            }
        )
        for recipient in recipient_data
    ]


def _write_batch_metadata(
    *,
    batch_id: str,
    state_code: StateCode,
    report_type: ReportType,
    **metadata_fields: str,
) -> None:
    gcsfs = GcsfsFactory.build()
    gcsfs.upload_from_string(