def _get_database_name_for_state(
    state_code: StateCode, instance: DirectIngestInstance
) -> str:
    """Returns the Postgres database name for the given state and ingest instance."""
    # Resolve which database version (legacy vs. multi-DB) this instance uses,
    # then derive the database name from the corresponding key.
    db_version = instance.database_version(SystemLevel.STATE, state_code=state_code)
    return SQLAlchemyDatabaseKey.for_state_code(state_code, db_version).db_name
def main(state_code: StateCode, ingest_instance: DirectIngestInstance) -> None:
    """Executes the main flow of the script.

    Prints the full list of SQL commands to delete all data for the given
    region, followed by step-by-step instructions for running them against
    the production or staging state database.
    """
    divider = (
        "********************************************************************************"
    )
    print(
        "RUN THE FOLLOWING COMMANDS IN ORDER TO DELETE ALL DATA FOR REGION "
        f"[{state_code.value}]"
    )
    print(divider)

    db_version = ingest_instance.database_version(SystemLevel.STATE, state_code)
    db_key = SQLAlchemyDatabaseKey.for_state_code(
        state_code=state_code, db_version=db_version
    )

    # The `\c` psql meta-command switches the session to the correct database
    # for this instance before any deletion commands run.
    print(f"\\c {db_key.db_name}")
    for deletion_cmd in generate_region_deletion_commands(state_code, db_version):
        print(deletion_cmd)
    print(divider)

    # Same secret-manager step is shown for both environments; only the GCP
    # project in the console URL differs.
    secret_manager_step = (
        "2) Go to secret manager to get login credentials stored in "
        "`state_db_user` and `state_db_password` secrets:"
        "\n\thttps://console.cloud.google.com/security/secret-manager"
        "?organizationId=448885369991&project={project}"
    )

    print("HOW TO PERFORM DELETION:")
    print(
        "1) Log into prod data client "
        "(`gcloud compute ssh prod-data-client --project=recidiviz-123`)"
    )
    print("\n> For production deletion:")
    print(secret_manager_step.format(project="recidiviz-123"))
    print("3) Log into postgres database (`prod-state-psql`)")
    print("\n> For staging deletion:")
    print(secret_manager_step.format(project="recidiviz-staging"))
    print("3) Log into postgres database (`dev-state-psql`)")
    print("\n> For all:")
    print(
        "4) Paste full list of commands listed above in postgres command line "
        "and run. Some commands may take a while to run."
    )
def _export_database_to_gcs() -> Tuple[str, HTTPStatus]:
    """Exports the state database for the requested state/instance to GCS.

    Reads `stateCode` and `ingestInstance` from the JSON request body, verifies
    no ingest locks block the operation, kicks off a Cloud SQL SQL export, and
    waits for it to finish.

    Returns:
        A tuple of (message-or-operation-id, HTTP status). On success the body
        is the Cloud SQL operation id with 200 OK.
    """
    try:
        state_code = StateCode(request.json["stateCode"])
        ingest_instance = DirectIngestInstance(request.json["ingestInstance"].upper())
        db_version = ingest_instance.database_version(
            system_level=SystemLevel.STATE, state_code=state_code
        )
    except (KeyError, ValueError):
        # KeyError added: a missing request field is a client error (400), not
        # an unhandled 500 as it was when only ValueError was caught.
        return "invalid parameters provided", HTTPStatus.BAD_REQUEST

    lock_manager = DirectIngestRegionLockManager.for_state_ingest(
        state_code, ingest_instance
    )
    if not lock_manager.can_proceed():
        return (
            "other locks blocking ingest have been acquired; aborting operation",
            HTTPStatus.CONFLICT,
        )

    db_key = SQLAlchemyDatabaseKey.for_state_code(state_code, db_version)
    cloud_sql_client = CloudSQLClientImpl(project_id=project_id)
    operation_id = cloud_sql_client.export_to_gcs_sql(
        db_key,
        GcsfsFilePath.from_absolute_path(
            f"{STATE_INGEST_EXPORT_URI}/{db_version.value}/{state_code.value}"
        ),
    )
    if operation_id is None:
        return (
            "Cloud SQL export operation was not started successfully.",
            HTTPStatus.INTERNAL_SERVER_ERROR,
        )

    operation_succeeded = cloud_sql_client.wait_until_operation_completed(
        operation_id, seconds_to_wait=GCS_IMPORT_EXPORT_TIMEOUT_SEC
    )
    if not operation_succeeded:
        # Fixed: the previous message said "import" in this export handler and
        # hard-coded "60 seconds" regardless of the configured timeout.
        return (
            f"Cloud SQL export did not complete within "
            f"{GCS_IMPORT_EXPORT_TIMEOUT_SEC} seconds",
            HTTPStatus.INTERNAL_SERVER_ERROR,
        )

    return operation_id, HTTPStatus.OK
def _is_legacy_instance(
    system_level: SystemLevel, region_code: str, ingest_instance: DirectIngestInstance
) -> bool:
    """Returns True if this ingest instance resolves to the LEGACY database version."""
    db_version = ingest_instance.database_version(
        system_level, StateCode.get(region_code)
    )
    # Identity comparison is safe here: enum members are singletons.
    return db_version is SQLAlchemyStateDatabaseVersion.LEGACY