def test_fields_custom_short(self):
    with self.assertRaises(ValueError) as ex_catcher:
        id_tools.parse_id(D.join(['a', 'b']), fields=['one_only'])
    assert 'contains 2 parts which is more than the number of declared id components' in str(
        ex_catcher.exception)
def iter_tasks(
    self
) -> Generator[Tuple[CeleryTask, JobScope, JobContext, int], None, None]:
    """Read persisted jobs and pass-through context objects for inspection"""
    with self.queue.JobsReader() as jobs_iter:
        for job_id, job_scope_additional_data, score in jobs_iter:
            job_id_parts = parse_id(job_id)
            job_scope = JobScope(job_scope_additional_data,
                                 job_id_parts,
                                 sweep_id=self.sweep_id,
                                 score=score)
            try:
                celery_task = resolve_job_scope_to_celery_task(job_scope)
                # TODO: Decide what to do with this.
                # Was designed for massive hash collection and such,
                # but cannot have too much data in there because we pickle it
                # and put it on Redis
                job_context = JobContext()
                yield celery_task, job_scope, job_context, score
                logger.info(
                    f"#{self.sweep_id}: Scheduling job_id {job_id} with score {score}."
                )
            except InvalidJobScopeException as e:
                ErrorInspector.inspect(e, job_scope.ad_account_id, {
                    'sweep_id': job_scope.sweep_id,
                    'job_id': job_scope.job_id
                })
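# A minimal consumption sketch for iter_tasks (an illustration, not part of
# this module): a sweep runner could drain the generator and dispatch each
# resolved Celery task. `producer` and the exact .delay(...) argument shape
# are assumptions here; the real call signature depends on how the tasks in
# this codebase accept their JobScope/JobContext arguments.
def schedule_all_tasks_sketch(producer) -> int:
    scheduled = 0
    for celery_task, job_scope, job_context, score in producer.iter_tasks():
        # Hypothetical dispatch of the resolved task with its scope/context.
        celery_task.delay(job_scope, job_context)
        scheduled += 1
    return scheduled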
def test_resolve_job_scope_to_celery_task_ad_account(
        self, mock_lifetime_iter, mock_breakdowns_iter):
    real_claim = RealityClaim(entity_id='A1',
                              ad_account_id='A1',
                              entity_type=Entity.AdAccount,
                              timezone='America/Los_Angeles')
    mock_lifetime_iter.return_value = []
    mock_breakdowns_iter.return_value = [
        RealityClaim(
            entity_id='AD1',
            ad_account_id='A1',
            entity_type=Entity.Ad,
            range_start=datetime(2019, 1, 20, 12, 0),
            timezone='America/Los_Angeles',
        )
    ]
    for job_generator in entity_expectation_generator_map[Entity.AdAccount]:
        for exp_claim in job_generator(real_claim):
            with self.subTest(job_generator=job_generator,
                              exp_claim=exp_claim):
                job_scope = JobScope(parse_id(exp_claim.job_id))
                assert inventory.resolve_job_scope_to_celery_task(job_scope)
def test_value_url_decoding(self):
    id_data_actual = id_tools.parse_id(D.join(['A+A', 'B%20B']),
                                       fields=['a', 'b', 'c'])
    id_data_should_be = dict(a='A A', b='B B', c=None)
    assert id_data_actual == id_data_should_be
def test_fields_custom_extra(self):
    id_data_actual = id_tools.parse_id(D.join(['A', 'B']),
                                       fields=['a', 'b', 'c'])
    id_data_should_be = dict(a='A', b='B', c=None)
    assert id_data_actual == id_data_should_be
def test_datetime_parsing_none_passthrough(self):
    datetime_fields = ['range_start', 'range_end']
    for field in datetime_fields:
        id_data_actual = id_tools.parse_id(D.join(['A', '']),
                                           fields=['a', field])
        id_data_should_be = {'a': 'A', field: None}
        assert id_data_actual == id_data_should_be
def test_datetime_parsing_date_type(self):
    datetime_fields = ['range_start', 'range_end']
    for field in datetime_fields:
        id_data_actual = id_tools.parse_id(D.join(['A', '2010-01-01']),
                                           fields=['a', field])
        id_data_should_be = {'a': 'A', field: date(2010, 1, 1)}
        assert id_data_actual == id_data_should_be
def test_resolve_job_scope_to_celery_task_page_post(self):
    real_claim = RealityClaim(entity_id='PP1',
                              ad_account_id='P1',
                              entity_type=Entity.PagePost)
    for job_generator in entity_expectation_generator_map[Entity.PagePost]:
        with self.subTest(job_generator=job_generator):
            exp_claim = next(job_generator(real_claim))
            job_scope = JobScope(parse_id(exp_claim.job_id))
            assert inventory.resolve_job_scope_to_celery_task(job_scope)
def test_datetime_parsing_gobbledygook_passthrough(self):
    datetime_fields = ['range_start', 'range_end']
    for field in datetime_fields:
        id_data_actual = id_tools.parse_id(D.join(['A', '1234567890T03E04E05']),
                                           fields=['a', field])
        id_data_should_be = {'a': 'A', field: '1234567890T03E04E05'}
        assert id_data_actual == id_data_should_be
def test_resolve_job_scope_to_celery_task_page(self):
    real_claim = RealityClaim(entity_id='P1',
                              ad_account_id='P1',
                              entity_type=Entity.Page,
                              timezone='America/Los_Angeles')
    for job_generator in entity_expectation_generator_map[Entity.Page]:
        for exp_claim in job_generator(real_claim):
            with self.subTest(job_generator=job_generator,
                              exp_claim=exp_claim):
                job_scope = JobScope(parse_id(exp_claim.job_id))
                assert inventory.resolve_job_scope_to_celery_task(job_scope)
def test_datetime_parsing_datetime_type_for_zeros(self):
    datetime_fields = ['range_start', 'range_end']
    for field in datetime_fields:
        id_data_actual = id_tools.parse_id(D.join(['A', '2010-01-01T00']),
                                           fields=['a', field])
        id_data_should_be = {
            'a': 'A',
            field: datetime(2010, 1, 1, 0, 0, 0)
        }
        assert id_data_actual == id_data_should_be, \
            'Must not be downgraded to date type, even with zero hours'
def test_fields_default_universal(self):
    id_data_actual = id_tools.parse_id(D.join(['oprm', 'm', NS, '123']),
                                       fields=id_tools.universal_id_fields)
    id_data_should_be = dict(
        component_vendor='oprm',
        component_id='m',
        namespace=NS,
        ad_account_id='123',
        entity_id=None,
        entity_type=None,
        range_end=None,
        range_start=None,
        report_type=None,
        report_variant=None,
    )
    assert id_data_actual == id_data_should_be
def test_fields_default(self):
    id_data_actual = id_tools.parse_id(
        D.join([NS, '123'])
        # fields=['a', 'b', 'c']
    )
    id_data_should_be = dict(
        namespace=NS,
        ad_account_id='123',
        entity_id=None,
        entity_type=None,
        range_end=None,
        range_start=None,
        report_type=None,
        report_variant=None,
    )
    assert id_data_actual == id_data_should_be
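# A combined sketch (not in the original suite) of the parse_id contract the
# tests above exercise piecewise, reusing the same D constant and id_tools
# import: values are URL-decoded, parts are zipped positionally with `fields`,
# trailing unfilled fields become None, and ISO date strings in the range
# fields are coerced to date objects.
def test_parse_id_contract_sketch(self):
    id_data_actual = id_tools.parse_id(
        D.join(['A%20A', '2010-01-01']),
        fields=['a', 'range_start', 'report_type'])
    id_data_should_be = {
        'a': 'A A',
        'range_start': date(2010, 1, 1),
        'report_type': None
    }
    assert id_data_actual == id_data_should_be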