def _update_pipeline_definition(self, push_event):
    try:
        try:
            update_repository_pipelines(
                repo_url=push_event.repository().repository_url(),
                cfg_set=self.cfg_set,
                whd_cfg=self.whd_cfg,
            )
        except (JobMappingNotFoundError, ConfigElementNotFoundError) as e:
            # A config element was missing or no JobMapping for the given repository was present.
            # Print a warning, reload the config and try again.
            logger.warning(
                f'failed to update pipeline definition: {e}. Will reload config and try again.'
            )
            # Attempt to fetch the latest cfg from the secrets server and replace it
            raw_dict = ccc.secrets_server.SecretsServerClient.default().retrieve_secrets()
            factory = ConfigFactory.from_dict(raw_dict)
            self.cfg_set = factory.cfg_set(self.cfg_set.name())
            # retry
            update_repository_pipelines(
                repo_url=push_event.repository().repository_url(),
                cfg_set=self.cfg_set,
                whd_cfg=self.whd_cfg,
            )
    except BaseException as be:
        logger.warning(f'failed to update pipeline definition - ignored {be}')
        import traceback
        try:
            traceback.print_exc()
        except BaseException:
            pass  # ignore
def setUp(self):
    # type definitions
    types = {
        'a_type': {
            'model': {
                'cfg_type_name': 'a_type',
                'type': 'NamedModelElement',
            },
        },
        'defined_but_unused_type': {
            'model': {
                'cfg_type_name': 'defined_but_unused_type',
                'type': 'NamedModelElement',
            },
        },
        'cfg_set': {
            'model': {
                'cfg_type_name': 'cfg_set',
                'type': 'ConfigurationSet',
            },
        },
    }
    # config sets
    cfg_sets = {
        'singleton_set': {
            'a_type': 'first_value_of_a',
        },
        'set_with_multiple_values': {
            'a_type': {
                'config_names': ['first_value_of_a', 'second_value_of_a'],
                'default': 'second_value_of_a',
            },
        },
    }
    # value definitions
    values = {
        'first_value_of_a': {'some_value': 123},
        'second_value_of_a': {'some_value': 42},
        'ignored_value_of_a': {'some_value': 'xxx'},
    }
    raw = {'cfg_types': types, 'cfg_set': cfg_sets, 'a_type': values}
    self.examinee = ConfigFactory.from_dict(raw)
def setUp(self):
    # type definitions
    types = {
        'a_type': {
            'model': {
                'cfg_type_name': 'a_type',
                'type': 'NamedModelElement',
            },
        },
        'cfg_set': {
            'model': {
                'cfg_type_name': 'cfg_set',
                'type': 'ConfigurationSet',
            },
        },
    }
    # config sets
    cfg_sets = {
        'first_set': {'a_type': 'first_value_of_a'},
        'second_set': {'a_type': 'second_value_of_a'},
    }
    # value definitions
    values = {
        'first_value_of_a': {'some_value': 123},
        'second_value_of_a': {'some_value': 42},
        'ignored_value_of_a': {'some_value': 'xxx'},
    }
    raw = {'cfg_types': types, 'cfg_set': cfg_sets, 'a_type': values}
    self.examinee = ConfigFactory.from_dict(raw)
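# Hedged usage sketch (not part of the test suite): the setUps above build a raw
# mapping with 'cfg_types', 'cfg_set' and per-type value sections and feed it to
# ConfigFactory.from_dict. Assuming only the calls that already appear in these
# tests (from_dict and cfg_set lookup by name), a minimal round-trip could look
# like this; 'demo_set' and 'demo_value' are hypothetical names for illustration.
def _demo_factory():
    raw = {
        'cfg_types': {
            'a_type': {'model': {'cfg_type_name': 'a_type', 'type': 'NamedModelElement'}},
            'cfg_set': {'model': {'cfg_type_name': 'cfg_set', 'type': 'ConfigurationSet'}},
        },
        'cfg_set': {'demo_set': {'a_type': 'demo_value'}},
        'a_type': {'demo_value': {'some_value': 1}},
    }
    factory = ConfigFactory.from_dict(raw)
    # cfg sets are resolved by name, as in the tests above
    return factory.cfg_set('demo_set')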
def test_from_dict_fails_on_missing_cfg_types(self):
    with self.assertRaises(ValueError):
        ConfigFactory.from_dict({})
def test_from_dict_fails_on_none(self):
    with self.assertRaises(Failure):
        ConfigFactory.from_dict(None)
def _parse_model(raw_dict):
    factory = ConfigFactory.from_dict(raw_dict)
    return factory
def deserialise(self, raw_dict):
    return ConfigFactory.from_dict(raw_dict)
def setUp(self):
    self.factory = ConfigFactory.from_dict(simple_cfg_dict())
    self.first_cfg_set = self.factory.cfg_set('first_set')
    self.second_cfg_set = self.factory.cfg_set('second_set')
    self.set_with_two_of_a_kind = self.factory.cfg_set('set_with_two_of_a_kind')
def _update_pipeline_definition(
    self,
    push_event,
    delivery_id: str,
    repository: str,
    hostname: str,
    es_client: ccc.elasticsearch.ElasticSearchClient,
    dispatch_start_time: datetime.datetime,
):
    def _do_update(
        delivery_id: str,
        event_type: str,
        repository: str,
        hostname: str,
        dispatch_start_time: datetime.datetime,
        es_client: ccc.elasticsearch.ElasticSearchClient,
    ):
        repo_url = push_event.repository().repository_url()
        job_mapping_set = self.cfg_set.job_mapping()
        job_mapping = job_mapping_set.job_mapping_for_repo_url(repo_url, self.cfg_set)
        replicate_repository_pipelines(
            repo_url=repo_url,
            cfg_set=self.cfg_factory.cfg_set(job_mapping.replication_ctx_cfg_set()),
            whd_cfg=self.whd_cfg,
        )
        process_end_time = datetime.datetime.now()
        process_total_seconds = (process_end_time - dispatch_start_time).total_seconds()
        webhook_delivery_metric = whd.metric.WebhookDelivery.create(
            delivery_id=delivery_id,
            event_type=event_type,
            repository=repository,
            hostname=hostname,
            process_total_seconds=process_total_seconds,
        )
        if es_client:
            ccc.elasticsearch.metric_to_es(
                es_client=es_client,
                metric=webhook_delivery_metric,
                index_name=whd.metric.index_name(webhook_delivery_metric),
            )

    try:
        _do_update(
            delivery_id=delivery_id,
            event_type='create',
            repository=repository,
            hostname=hostname,
            dispatch_start_time=dispatch_start_time,
            es_client=es_client,
        )
    except (JobMappingNotFoundError, ConfigElementNotFoundError) as e:
        # A config element was missing or no JobMapping for the given repository was present.
        # Print a warning, reload the config and try again.
        logger.warning(
            f'failed to update pipeline definition: {e}. Will reload config and try again.'
        )
        # Attempt to fetch the latest cfg from the secrets server and replace it
        raw_dict = ccc.secrets_server.SecretsServerClient.default().retrieve_secrets()
        self.cfg_factory = ConfigFactory.from_dict(raw_dict)
        self.cfg_set = self.cfg_factory.cfg_set(self.cfg_set.name())
        # retry
        _do_update(
            delivery_id=delivery_id,
            event_type='create',
            repository=repository,
            hostname=hostname,
            dispatch_start_time=dispatch_start_time,
            es_client=es_client,
        )
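# Hedged sketch: both handlers above retry exactly once after refreshing the cfg
# factory from the secrets server. The same pattern, extracted into a generic
# helper purely for illustration; the 'action' and 'reload_cfg' callables are
# hypothetical and not part of the handlers above.
def _retry_once_after_cfg_reload(action, reload_cfg):
    try:
        return action()
    except (JobMappingNotFoundError, ConfigElementNotFoundError) as e:
        logger.warning(f'config lookup failed: {e}; reloading config and retrying once')
        reload_cfg()
        return action()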