def test_multilevel_compose(self) -> None:
    base_queue = queue_pb2.Queue(
        rate_limits=queue_pb2.RateLimits(
            max_dispatches_per_second=100,
            max_concurrent_dispatches=1,
        ),
        retry_config=queue_pb2.RetryConfig(max_attempts=5),
    )

    queue = (
        ProtobufBuilder(queue_pb2.Queue)
        .compose(base_queue)
        .compose(
            queue_pb2.Queue(
                rate_limits=queue_pb2.RateLimits(
                    max_dispatches_per_second=50,
                ),
            ))
        .update_args(
            retry_config=queue_pb2.RetryConfig(max_doublings=4),
        )
        .build())

    expected_queue = queue_pb2.Queue(
        rate_limits=queue_pb2.RateLimits(
            max_dispatches_per_second=50,
            max_concurrent_dispatches=1,
        ),
        retry_config=queue_pb2.RetryConfig(
            max_attempts=5,
            max_doublings=4,
        ),
    )

    self.assertEqual(queue, expected_queue)
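
# Illustrative sketch (not from the original tests): the multi-level compose
# above appears to follow standard protobuf merge semantics, where scalar
# fields set on the overlay win and unset fields fall back to the base. The
# helper below reproduces the same outcome with plain CopyFrom/MergeFrom,
# assuming that equivalence holds for ProtobufBuilder.compose.
def _merge_semantics_sketch() -> queue_pb2.Queue:
    base = queue_pb2.Queue(
        rate_limits=queue_pb2.RateLimits(
            max_dispatches_per_second=100,
            max_concurrent_dispatches=1,
        ),
    )
    overlay = queue_pb2.Queue(
        rate_limits=queue_pb2.RateLimits(max_dispatches_per_second=50),
    )
    merged = queue_pb2.Queue()
    merged.CopyFrom(base)
    merged.MergeFrom(overlay)
    # merged now has max_dispatches_per_second=50 (taken from the overlay) and
    # max_concurrent_dispatches=1 (kept from the base).
    return merged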
from recidiviz.common.google_cloud.google_cloud_tasks_shared_queues import \
    DIRECT_INGEST_SCHEDULER_QUEUE_V2, \
    DIRECT_INGEST_STATE_PROCESS_JOB_QUEUE_V2, \
    DIRECT_INGEST_JAILS_PROCESS_JOB_QUEUE_V2, BIGQUERY_QUEUE_V2, \
    JOB_MONITOR_QUEUE_V2, SCRAPER_PHASE_QUEUE_V2, \
    DIRECT_INGEST_BQ_IMPORT_EXPORT_QUEUE_V2
from recidiviz.common.google_cloud.google_cloud_tasks_client_wrapper import \
    GoogleCloudTasksClientWrapper
from recidiviz.common.google_cloud.protobuf_builder import ProtobufBuilder
from recidiviz.utils import vendors, regions

DIRECT_INGEST_QUEUE_BASE_CONFIG = queue_pb2.Queue(
    rate_limits=queue_pb2.RateLimits(
        max_dispatches_per_second=100,
        max_concurrent_dispatches=1,
    ),
    retry_config=queue_pb2.RetryConfig(max_attempts=5),
    stackdriver_logging_config=queue_pb2.StackdriverLoggingConfig(
        sampling_ratio=1.0,
    ),
)

BIGQUERY_QUEUE_CONFIG = queue_pb2.Queue(
    name=BIGQUERY_QUEUE_V2,
    rate_limits=queue_pb2.RateLimits(
        max_dispatches_per_second=1,
        max_concurrent_dispatches=1,
    ),
    retry_config=queue_pb2.RetryConfig(max_attempts=1),
    stackdriver_logging_config=queue_pb2.StackdriverLoggingConfig(
        sampling_ratio=1.0,
    ),
)

JOB_MONITOR_QUEUE_CONFIG = queue_pb2.Queue(
    name=JOB_MONITOR_QUEUE_V2,
def test_initialize_queues(self, mock_regions):
    # Arrange
    region_xx = fake_region(
        region_code='us_xx',
        queue={'rate_limits': {'max_dispatches_per_second': 0.3}})
    region_xx.get_queue_name.return_value = 'us_xx_queue'
    region_yy = fake_region(region_code='us_yy')
    region_yy.get_queue_name.return_value = 'us_yy_queue'
    mock_regions.return_value = [region_xx, region_yy]

    # Act
    with metadata.local_project_id_override('my-project-id'):
        google_cloud_task_queue_config.initialize_queues(
            google_auth_token='fake-auth-token')

    # Assert
    queues_updated_by_id = self.get_updated_queues()
    for queue in queues_updated_by_id.values():
        self.assertTrue(
            queue.name.startswith(
                'projects/my-project-id/locations/us-east1/queues/'))
        self.assertEqual(queue.stackdriver_logging_config.sampling_ratio, 1.0)

    direct_ingest_queue_ids = {
        'direct-ingest-state-process-job-queue-v2',
        'direct-ingest-jpp-process-job-queue-v2',
        'direct-ingest-bq-import-export-v2',
        'direct-ingest-scheduler-v2',
    }
    self.assertFalse(
        direct_ingest_queue_ids.difference(queues_updated_by_id.keys()))

    for queue_id in direct_ingest_queue_ids:
        queue = queues_updated_by_id[queue_id]
        self.assertEqual(queue.rate_limits.max_concurrent_dispatches, 1)

    # Test that composition works as expected
    self.assertEqual(
        queues_updated_by_id[region_xx.get_queue_name()],
        queue_pb2.Queue(
            name='projects/my-project-id/locations/us-east1/queues/us_xx_queue',
            rate_limits=queue_pb2.RateLimits(
                # This is overridden in the mock above
                max_dispatches_per_second=0.3,
                max_concurrent_dispatches=3,
            ),
            retry_config=queue_pb2.RetryConfig(
                min_backoff=duration_pb2.Duration(seconds=5),
                max_backoff=duration_pb2.Duration(seconds=300),
                max_attempts=5,
            ),
            stackdriver_logging_config=queue_pb2.StackdriverLoggingConfig(
                sampling_ratio=1.0,
            )))

    # Test that other regions are unaffected
    self.assertEqual(
        queues_updated_by_id[region_yy.get_queue_name()],
        queue_pb2.Queue(
            name='projects/my-project-id/locations/us-east1/queues/us_yy_queue',
            rate_limits=queue_pb2.RateLimits(
                max_dispatches_per_second=0.08333333333,
                max_concurrent_dispatches=3,
            ),
            retry_config=queue_pb2.RetryConfig(
                min_backoff=duration_pb2.Duration(seconds=5),
                max_backoff=duration_pb2.Duration(seconds=300),
                max_attempts=5,
            ),
            stackdriver_logging_config=queue_pb2.StackdriverLoggingConfig(
                sampling_ratio=1.0,
            )))

    self.assertTrue('bigquery-v2' in queues_updated_by_id)
    self.assertTrue('job-monitor-v2' in queues_updated_by_id)
    self.assertTrue('scraper-phase-v2' in queues_updated_by_id)
def test_initialize_queues(self, mock_regions: Mock) -> None:
    # Arrange
    region_xx = fake_region(
        region_code="us_xx",
        queue={"rate_limits": {"max_dispatches_per_second": 0.3}},
        get_queue_name=lambda: "us_xx_queue",
    )
    region_yy = fake_region(region_code="us_yy",
                            get_queue_name=lambda: "us_yy_queue")
    mock_regions.return_value = [region_xx, region_yy]

    # Act
    with metadata.local_project_id_override("my-project-id"):
        google_cloud_task_queue_config.initialize_queues(
            google_auth_token="fake-auth-token")

    # Assert
    queues_updated_by_id = self.get_updated_queues()
    for queue in queues_updated_by_id.values():
        self.assertTrue(
            queue.name.startswith(
                "projects/my-project-id/locations/us-east1/queues/"))
        self.assertEqual(queue.stackdriver_logging_config.sampling_ratio, 1.0)

    direct_ingest_queue_ids = {
        "direct-ingest-state-process-job-queue-v2",
        "direct-ingest-jpp-process-job-queue-v2",
        "direct-ingest-bq-import-export-v2",
        "direct-ingest-scheduler-v2",
    }
    self.assertFalse(
        direct_ingest_queue_ids.difference(queues_updated_by_id.keys()))

    for queue_id in direct_ingest_queue_ids:
        queue = queues_updated_by_id[queue_id]
        self.assertEqual(queue.rate_limits.max_concurrent_dispatches, 1)

    # Test that composition works as expected
    self.assertEqual(
        queues_updated_by_id[region_xx.get_queue_name()],
        queue_pb2.Queue(
            name="projects/my-project-id/locations/us-east1/queues/us_xx_queue",
            rate_limits=queue_pb2.RateLimits(
                # This is overridden in the mock above
                max_dispatches_per_second=0.3,
                max_concurrent_dispatches=3,
            ),
            retry_config=queue_pb2.RetryConfig(
                min_backoff=duration_pb2.Duration(seconds=5),
                max_backoff=duration_pb2.Duration(seconds=300),
                max_attempts=5,
            ),
            stackdriver_logging_config=queue_pb2.StackdriverLoggingConfig(
                sampling_ratio=1.0,
            ),
        ),
    )

    # Test that other regions are unaffected
    self.assertEqual(
        queues_updated_by_id[region_yy.get_queue_name()],
        queue_pb2.Queue(
            name="projects/my-project-id/locations/us-east1/queues/us_yy_queue",
            rate_limits=queue_pb2.RateLimits(
                max_dispatches_per_second=0.08333333333,
                max_concurrent_dispatches=3,
            ),
            retry_config=queue_pb2.RetryConfig(
                min_backoff=duration_pb2.Duration(seconds=5),
                max_backoff=duration_pb2.Duration(seconds=300),
                max_attempts=5,
            ),
            stackdriver_logging_config=queue_pb2.StackdriverLoggingConfig(
                sampling_ratio=1.0,
            ),
        ),
    )

    self.assertTrue("bigquery-v2" in queues_updated_by_id)
    self.assertTrue("job-monitor-v2" in queues_updated_by_id)
    self.assertTrue("scraper-phase-v2" in queues_updated_by_id)
    self.assertTrue("admin-panel-data-discovery" in queues_updated_by_id)
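
# Illustrative sketch (not part of the original module): how the shared base
# config defined above might be combined with a per-queue name via
# ProtobufBuilder, mirroring the composition these tests verify. The helper
# name and the example queue path are hypothetical.
def _named_direct_ingest_queue(queue_path: str) -> queue_pb2.Queue:
    return (ProtobufBuilder(queue_pb2.Queue)
            .compose(DIRECT_INGEST_QUEUE_BASE_CONFIG)
            .update_args(name=queue_path)
            .build())


# Example usage (hypothetical path):
# _named_direct_ingest_queue(
#     "projects/my-project-id/locations/us-east1/queues/direct-ingest-scheduler-v2")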