def setUp(self) -> None:
    """Build a SaasHerder against a mocked State pre-loaded with recorded
    deploy/post-deploy state fixtures."""
    self.all_saas_files = \
        [self.fxt.get_anymarkup('saas.gql.yml')]
    # Patch State so no real state backend (S3) is touched.
    self.state_patcher = \
        patch("reconcile.utils.saasherder.State", autospec=True)
    self.state_mock = self.state_patcher.start().return_value
    self.saas_file = \
        self.fxt.get_anymarkup('saas.gql.yml')
    # ApiVersion is set in the saas gql query method in queries module
    self.saas_file["apiVersion"] = "v2"
    self.deploy_current_state_fxt = \
        self.fxt.get_anymarkup('saas_deploy.state.json')
    self.post_deploy_current_state_fxt = \
        self.fxt.get_anymarkup('saas_post_deploy.state.json')
    # First state.get() returns the deploy state, second the post-deploy one.
    self.state_mock.get.side_effect = [
        self.deploy_current_state_fxt,
        self.post_deploy_current_state_fxt
    ]
    self.saasherder = SaasHerder(
        [self.saas_file],
        thread_pool_size=1,
        gitlab=None,
        integration='',
        integration_version='',
        accounts={"name": "test-account"},  # Initiates State in SaasHerder
        settings={"hashLength": 24})
def test_attribute_multiple_saas_files_with_default_return_false(self):
    """Even when every saas file sets the flag, passing a default yields
    False for multiple saas files."""
    common = {
        "managedResourceTypes": [],
        "resourceTemplates": [],
        "attrib": True,
    }
    saas_files = [
        dict(path="path1", name="name1", **common),
        dict(path="path2", name="name2", **common),
    ]
    herder = SaasHerder(
        saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration="",
        integration_version="",
        settings={},
    )
    att = herder._get_saas_file_feature_enabled("attrib", default=True)
    self.assertFalse(att)
def run(dry_run, thread_pool_size=10):
    """Trigger Jenkins deploy jobs for saas-file targets whose upstream
    commits moved.

    Exits non-zero when no saas files are found or when any trigger fails.
    """
    saas_files = queries.get_saas_files()
    if not saas_files:
        logging.error('no saas files found')
        sys.exit(1)

    # Remove saas-file targets that are disabled
    for saas_file in saas_files[:]:
        resource_templates = saas_file['resourceTemplates']
        for rt in resource_templates[:]:
            targets = rt['targets']
            for target in targets[:]:
                if target['disable']:
                    targets.remove(target)

    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    accounts = queries.get_aws_accounts()
    gl = GitLabApi(instance, settings=settings)
    jenkins_map = jenkins_base.get_jenkins_map()
    saasherder = SaasHerder(
        saas_files,
        thread_pool_size=thread_pool_size,
        gitlab=gl,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION,
        settings=settings,
        accounts=accounts)
    trigger_specs = saasherder.get_moving_commits_diff(dry_run)
    already_triggered = []
    error = False
    for job_spec in trigger_specs:
        saas_file_name = job_spec['saas_file_name']
        env_name = job_spec['env_name']
        instance_name = job_spec['instance_name']
        job_name = get_openshift_saas_deploy_job_name(
            saas_file_name, env_name, settings)
        if job_name not in already_triggered:
            logging.info(['trigger_job', instance_name, job_name])
            if dry_run:
                already_triggered.append(job_name)
        if not dry_run:
            jenkins = jenkins_map[instance_name]
            try:
                if job_name not in already_triggered:
                    jenkins.trigger_job(job_name)
                    already_triggered.append(job_name)
                # persist the new commit even when the job was already
                # triggered by another spec for the same saas file / env
                saasherder.update_moving_commit(job_spec)
            except Exception as e:
                error = True
                # log the failure details (consistent with the configs-diff
                # trigger integration) instead of swallowing them
                logging.error(
                    f"could not trigger job {job_name} "
                    f"in {instance_name}. details: {str(e)}")

    if error:
        sys.exit(1)
def test_check_saas_file_env_combo_not_unique(self):
    """A saas-file name too long to produce a unique per-env combination
    must fail validation."""
    def target_for(env):
        return {
            'namespace': {
                'name': 'ns',
                'environment': {'name': env},
                'cluster': {'name': 'cluster'},
            },
            'parameters': {},
        }

    saas_files = [{
        'path': 'path1',
        'name': 'long-name-which-is-too-long-to-produce-unique-combo',
        'managedResourceTypes': [],
        'resourceTemplates': [{
            'name': 'rt',
            'targets': [target_for('env1'), target_for('env2')],
        }],
        'roles': [{'users': [{'org_username': '******'}]}],
    }]
    herder = SaasHerder(
        saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration='',
        integration_version='',
        settings={},
        validate=True,
    )
    self.assertFalse(herder.valid)
def test_populate_desired_state_saas_file_delete(self):
    """A spec flagged for deletion produces no desired state."""
    herder = SaasHerder(
        [],
        thread_pool_size=1,
        gitlab=None,
        integration='',
        integration_version='',
        settings={})
    spec = {'delete': True}
    result = herder.populate_desired_state_saas_file(spec, None)
    self.assertIsNone(result)
def run(dry_run, thread_pool_size=10):
    """Trigger Jenkins deploy jobs for saas-file targets whose configs
    changed.

    NOTE: failed triggers are retried indefinitely — the loop only exits
    once a full pass over trigger_specs completes without errors.
    """
    saas_files = queries.get_saas_files()
    if not saas_files:
        logging.error('no saas files found')
        sys.exit(1)
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    accounts = queries.get_aws_accounts()
    gl = GitLabApi(instance, settings=settings)
    jenkins_map = jenkins_base.get_jenkins_map()
    saasherder = SaasHerder(
        saas_files,
        thread_pool_size=thread_pool_size,
        gitlab=gl,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION,
        settings=settings,
        accounts=accounts)
    trigger_specs = saasherder.get_configs_diff()
    already_triggered = []

    error = True  # enter loop
    while error:
        error = False
        for job_spec in trigger_specs:
            saas_file_name = job_spec['saas_file_name']
            env_name = job_spec['env_name']
            instance_name = job_spec['instance_name']
            job_name = get_openshift_saas_deploy_job_name(
                saas_file_name, env_name, settings)
            if job_name not in already_triggered:
                logging.info(['trigger_job', instance_name, job_name])
                if dry_run:
                    already_triggered.append(job_name)
            if not dry_run:
                jenkins = jenkins_map[instance_name]
                try:
                    # only trigger once per job name; update_config runs for
                    # every spec so all config hashes get persisted
                    if job_name not in already_triggered:
                        jenkins.trigger_job(job_name)
                        already_triggered.append(job_name)
                    saasherder.update_config(job_spec)
                except Exception as e:
                    error = True
                    logging.error(
                        f"could not trigger job {job_name} " +
                        f"in {instance_name}. details: {str(e)}"
                    )
        if error:
            time.sleep(10)  # add to constants module once created
def test_validate_upstream_jobs_valid(self):
    """Validating upstream jobs against the fixture JJB keeps the herder
    valid."""
    herder = SaasHerder(
        self.saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration="",
        integration_version="",
        settings={},
        validate=True,
    )
    herder.validate_upstream_jobs(self.jjb)
    self.assertTrue(herder.valid)
def test_check_saas_file_upstream_not_used_with_commit_sha(self):
    """The unmodified fixture (upstream without a commit-sha ref) passes
    validation."""
    herder = SaasHerder(
        self.saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration="",
        integration_version="",
        settings={},
        validate=True,
    )
    self.assertTrue(herder.valid)
def run(dry_run):
    """Validate all saas files; exit non-zero when any is invalid."""
    saas_files = queries.get_saas_files()
    settings = queries.get_app_interface_settings()
    herder = SaasHerder(
        saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION,
        settings=settings,
        validate=True)
    if not herder.valid:
        sys.exit(1)
def test_check_saas_file_env_combo_unique(self):
    """The fixture's saas-file/env combinations are unique, so validation
    passes."""
    herder = SaasHerder(
        self.saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration="",
        integration_version="",
        settings={},
        validate=True,
    )
    self.assertTrue(herder.valid)
def setUp(self) -> None:
    """Build a SaasHerder with GitHub, image-auth, file-content and State
    access mocked out, then populate desired state into a fresh
    ResourceInventory."""
    self.all_saas_files = \
        [self.fxt.get_anymarkup('saas.gql.yml')]
    # Patch State so no real state backend is touched.
    self.state_patcher = \
        patch("reconcile.utils.saasherder.State", autospec=True)
    self.state_mock = self.state_patcher.start().return_value
    # Patch out GitHub access and image registry auth.
    self.ig_patcher = \
        patch.object(SaasHerder, "_initiate_github", autospec=True)
    self.ig_patcher.start()
    self.image_auth_patcher = \
        patch.object(SaasHerder, "_initiate_image_auth")
    self.image_auth_patcher.start()
    # File contents come from the fixture template instead of a repo.
    self.gfc_patcher = \
        patch.object(SaasHerder, "_get_file_contents", autospec=True)
    gfc_mock = self.gfc_patcher.start()
    self.saas_file = \
        self.fxt.get_anymarkup('saas.gql.yml')
    # ApiVersion is set in the saas gql query method in queries module
    self.saas_file["apiVersion"] = "v2"
    gfc_mock.return_value = (self.template, "url", "ahash")
    self.deploy_current_state_fxt = \
        self.fxt.get_anymarkup('saas_deploy.state.json')
    self.post_deploy_current_state_fxt = \
        self.fxt.get_anymarkup('saas_post_deploy.state.json')
    self.saasherder = SaasHerder(
        [self.saas_file],
        thread_pool_size=1,
        gitlab=None,
        integration='',
        integration_version='',
        accounts={"name": "test-account"},  # Initiates State in SaasHerder
        settings={"hashLength": 24})
    # IMPORTANT: Populating desired state modify self.saas_files within
    # saasherder object.
    self.ri = ResourceInventory()
    for ns in ["test-ns-publisher", "test-ns-subscriber"]:
        for kind in ["Service", "Deployment"]:
            self.ri.initialize_resource_type(self.cluster, ns, kind)
    self.saasherder.populate_desired_state(self.ri)
    if self.ri.has_error_registered():
        raise Exception("Errors registered in Resourceinventory")
def test_validate_upstream_jobs_invalid(self):
    """A JJB config without the referenced job marks the herder invalid."""
    herder = SaasHerder(
        self.saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration="",
        integration_version="",
        settings={},
        validate=True,
    )
    empty_jjb = MockJJB({"ci": []})
    herder.validate_upstream_jobs(empty_jjb)
    self.assertFalse(herder.valid)
def test_check_saas_file_promotion_same_source(self):
    """A subscriber whose repo url differs from its publisher's must fail
    validation."""
    publisher = {
        "name": "rt_publisher",
        "url": "repo_publisher",
        "targets": [
            {
                "namespace": {
                    "name": "ns",
                    "environment": {"name": "env1"},
                    "cluster": {"name": "cluster"},
                },
                "parameters": {},
                "ref": "0000000000000",
                "promotion": {
                    "publish": ["channel-1"],
                },
            }
        ],
    }
    subscriber = {
        "name": "rt_subscriber",
        "url": "this-repo-will-not-match-the-publisher",
        "targets": [
            {
                "namespace": {
                    "name": "ns2",
                    "environment": {"name": "env1"},
                    "cluster": {"name": "cluster"},
                },
                "parameters": {},
                "ref": "0000000000000",
                "promotion": {
                    "auto": "true",
                    "subscribe": ["channel-1"],
                },
            }
        ],
    }
    self.saas_files[0]["resourceTemplates"] = [publisher, subscriber]
    herder = SaasHerder(
        self.saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration="",
        integration_version="",
        settings={},
        validate=True,
    )
    self.assertFalse(herder.valid)
def test_check_saas_file_env_combo_not_unique(self):
    """An overly long saas-file name breaks per-env uniqueness and fails
    validation."""
    too_long = "long-name-which-is-too-long-to-produce-unique-combo"
    self.saas_files[0]["name"] = too_long
    herder = SaasHerder(
        self.saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration="",
        integration_version="",
        settings={},
        validate=True,
    )
    self.assertFalse(herder.valid)
def test_check_saas_file_upstream_used_with_commit_sha(self):
    """Pointing an upstream target's ref at a commit sha fails validation."""
    commit_sha = "2637b6c41bda7731b1bcaaf18b4a50d7c5e63e30"
    first_target = self.saas_files[0]["resourceTemplates"][0]["targets"][0]
    first_target["ref"] = commit_sha
    herder = SaasHerder(
        self.saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration="",
        integration_version="",
        settings={},
        validate=True,
    )
    self.assertFalse(herder.valid)
def test_validate_image_tag_not_equals_ref_valid(self):
    """Setting an IMAGE_TAG parameter on the fixture target keeps
    validation passing."""
    first_target = self.saas_files[0]["resourceTemplates"][0]["targets"][0]
    first_target["parameters"] = '{"IMAGE_TAG": "2637b6c"}'
    herder = SaasHerder(
        self.saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration="",
        integration_version="",
        settings={},
        validate=True,
    )
    self.assertTrue(herder.valid)
def test_attribute_none(self):
    """Querying a feature flag no saas file defines yields None."""
    saas_files = [{
        'path': 'path1',
        'name': 'name1',
        'managedResourceTypes': [],
        'resourceTemplates': [],
    }]
    herder = SaasHerder(
        saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration='',
        integration_version='',
        settings={})
    result = herder._get_saas_file_feature_enabled('no_such_attribute')
    self.assertEqual(result, None)
def test_get_moving_commits_diff_saas_file_bad_sha1(self):
    """A GithubException while resolving the commit sha yields an empty
    diff instead of crashing."""
    herder = SaasHerder(
        self.saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration='',
        integration_version='',
        settings={},
        validate=False)
    herder.state = MagicMock()
    herder.state.get.return_value = 'asha'
    self.get_pipelines_provider.return_value = 'apipelineprovider'
    self.get_commit_sha.side_effect = GithubException(
        401, 'somedata', {'aheader': 'avalue'})
    # At least we don't crash!
    diff = herder.get_moving_commits_diff_saas_file(self.saas_files[0], True)
    self.assertEqual(diff, [])
def test_attribute_not_none_with_default(self):
    """An explicitly set feature flag wins over the supplied default."""
    saas_files = [{
        'path': 'path1',
        'name': 'name1',
        'managedResourceTypes': [],
        'resourceTemplates': [],
        'attrib': True,
    }]
    herder = SaasHerder(
        saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration='',
        integration_version='',
        settings={})
    result = herder._get_saas_file_feature_enabled('attrib', default=False)
    self.assertEqual(result, True)
def run(dry_run):
    """Validate all (v1 and v2) saas files and check that every referenced
    repo appears in app-interface codeComponents.

    Exits with ExitCodes.ERROR when validation fails or repos are missing.
    """
    saas_files = queries.get_saas_files(v1=True, v2=True)
    settings = queries.get_app_interface_settings()
    saasherder = SaasHerder(
        saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION,
        settings=settings,
        validate=True)
    # build the membership set once: avoids an O(repos * saas_repos)
    # scan over the repo list
    app_int_repos = set(queries.get_repos())
    missing_repos = [r for r in saasherder.repo_urls
                     if r not in app_int_repos]
    for r in missing_repos:
        logging.error(f'repo is missing from codeComponents: {r}')
    if not saasherder.valid or missing_repos:
        sys.exit(ExitCodes.ERROR)
def test_get_moving_commits_diff_saas_file_all_fine(self):
    """Both targets resolve to fresh commit shas and produce one trigger
    spec each."""
    herder = SaasHerder(
        self.saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration="",
        integration_version="",
        settings={},
        validate=False,
    )
    herder.state = MagicMock()
    herder.state.get.return_value = "asha"
    self.get_commit_sha.side_effect = ("abcd4242", "4242efg")
    self.get_pipelines_provider.return_value = "apipelineprovider"
    common = {
        "saas_file_name": self.saas_files[0]["name"],
        "timeout": None,
        "pipelines_provider": "apipelineprovider",
        "namespace_name": "ns",
        "rt_name": "rt",
    }
    expected = [
        dict(common, env_name="env1", ref="main",
             commit_sha="abcd4242", cluster_name="cluster1"),
        dict(common, env_name="env2", ref="secondary",
             commit_sha="4242efg", cluster_name="cluster2"),
    ]
    self.assertEqual(
        herder.get_moving_commits_diff_saas_file(self.saas_files[0], True),
        expected,
    )
def test_attribute_none_with_default(self):
    """A missing feature flag falls back to the supplied default."""
    saas_files = [{
        "path": "path1",
        "name": "name1",
        "managedResourceTypes": [],
        "resourceTemplates": [],
    }]
    herder = SaasHerder(
        saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration="",
        integration_version="",
        settings={},
    )
    result = herder._get_saas_file_feature_enabled("no_such_att", default=True)
    self.assertEqual(result, True)
def test_collect_repo_urls(self):
    """repo_urls exposes the url of every resource template as a set."""
    url = "git-repo"
    templates = [{"name": "name", "url": url, "targets": []}]
    saas_files = [{
        "path": "path1",
        "name": "name1",
        "managedResourceTypes": [],
        "resourceTemplates": templates,
    }]
    herder = SaasHerder(
        saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration="",
        integration_version="",
        settings={},
    )
    self.assertEqual({url}, herder.repo_urls)
def test_attribute_multiple_saas_files_return_false(self):
    """When saas files disagree on a flag, the feature resolves to False."""
    with_flag = {
        'path': 'path1',
        'name': 'name1',
        'managedResourceTypes': [],
        'resourceTemplates': [],
        'attrib': True,
    }
    without_flag = {
        'path': 'path2',
        'name': 'name2',
        'managedResourceTypes': [],
        'resourceTemplates': [],
    }
    herder = SaasHerder(
        [with_flag, without_flag],
        thread_pool_size=1,
        gitlab=None,
        integration='',
        integration_version='',
        settings={})
    self.assertFalse(herder._get_saas_file_feature_enabled('attrib'))
def setUp(self):
    """Load the remote-openshift-template fixture and build a SaasHerder
    with GitHub access and image checking mocked out."""
    saas_files = []
    self.fxts = Fixtures("saasherder_populate_desired")
    for file in [self.fxts.get("saas_remote_openshift_template.yaml")]:
        saas_files.append(yaml.safe_load(file))

    # sanity check: exactly one fixture saas file is expected
    self.assertEqual(1, len(saas_files))
    self.saasherder = SaasHerder(
        saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration="",
        integration_version="",
        settings={"hashLength": 7},
    )

    # Mock GitHub interactions.
    self.initiate_gh_patcher = patch.object(
        SaasHerder,
        "_initiate_github",
        autospec=True,
        return_value=None,
    )
    # file contents are served by the test's fake instead of a repo
    self.get_file_contents_patcher = patch.object(
        SaasHerder,
        "_get_file_contents",
        wraps=self.fake_get_file_contents,
    )
    self.initiate_gh_patcher.start()
    self.get_file_contents_patcher.start()
    # Mock image checking.
    self.get_check_images_patcher = patch.object(
        SaasHerder,
        "_check_images",
        autospec=True,
        return_value=None,
    )
    self.get_check_images_patcher.start()
def test_collect_repo_urls(self):
    """The resource template's url is collected into repo_urls."""
    expected_url = 'git-repo'
    saas_files = [{
        'path': 'path1',
        'name': 'name1',
        'managedResourceTypes': [],
        'resourceTemplates': [
            {'name': 'name', 'url': expected_url, 'targets': []},
        ],
    }]
    herder = SaasHerder(
        saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration='',
        integration_version='',
        settings={})
    self.assertEqual({expected_url}, herder.repo_urls)
def test_get_moving_commits_diff_saas_file_all_fine(self):
    """Each target yields a trigger spec carrying its freshly resolved
    commit sha."""
    herder = SaasHerder(
        self.saas_files,
        thread_pool_size=1,
        gitlab=None,
        integration='',
        integration_version='',
        settings={},
        validate=False)
    herder.state = MagicMock()
    herder.state.get.return_value = 'asha'
    self.get_commit_sha.side_effect = ('abcd4242', '4242efg')
    self.get_pipelines_provider.return_value = 'apipelineprovider'
    per_target = [
        ('env1', 'main', 'abcd4242', 'cluster1'),
        ('env2', 'secondary', '4242efg', 'cluster2'),
    ]
    expected = []
    for env, ref, sha, cluster in per_target:
        expected.append({
            'saas_file_name': self.saas_files[0]['name'],
            'env_name': env,
            'timeout': None,
            'ref': ref,
            'commit_sha': sha,
            'cluster_name': cluster,
            'pipelines_provider': 'apipelineprovider',
            'namespace_name': 'ns',
            'rt_name': 'rt',
        })
    self.assertEqual(
        herder.get_moving_commits_diff_saas_file(
            self.saas_files[0], True),
        expected)
def run(
    dry_run,
    thread_pool_size=10,
    io_dir="throughput/",
    saas_file_name=None,
    env_name=None,
    gitlab_project_id=None,
    defer=None,
):
    """Deploy saas-file targets to OpenShift, then publish promotions and
    slack notifications for single-saas-file (deployment job) runs."""
    all_saas_files = queries.get_saas_files(v1=True, v2=True)
    saas_files = queries.get_saas_files(saas_file_name, env_name, v1=True, v2=True)
    app_interface_settings = queries.get_app_interface_settings()
    if not saas_files:
        logging.error("no saas files found")
        sys.exit(ExitCodes.ERROR)

    # notify different outputs (publish results, slack notifications)
    # we only do this if:
    # - this is not a dry run
    # - there is a single saas file deployed
    notify = not dry_run and len(saas_files) == 1
    # BUGFIX: initialize both names unconditionally. Previously, when notify
    # was True but the saas file had no 'slack' section, 'slack' was never
    # bound and the final `if notify and slack ...` raised NameError.
    slack = None
    slack_info = None
    if notify:
        saas_file = saas_files[0]
        slack_info = saas_file.get("slack")
        if slack_info:
            slack = slackapi_from_slack_workspace(
                slack_info,
                app_interface_settings,
                QONTRACT_INTEGRATION,
                init_usergroups=False,
            )
            # support built-in start and end slack notifications
            # only in v2 saas files
            if saas_file["apiVersion"] == "v2":
                ri = ResourceInventory()
                console_url = compose_console_url(saas_file, saas_file_name, env_name)
                # deployment result notification
                defer(
                    lambda: slack_notify(
                        saas_file_name,
                        env_name,
                        slack,
                        ri,
                        console_url,
                        in_progress=False,
                    )
                )
                # deployment start notification
                slack_notifications = slack_info.get("notifications")
                if slack_notifications and slack_notifications.get("start"):
                    slack_notify(
                        saas_file_name,
                        env_name,
                        slack,
                        ri,
                        console_url,
                        in_progress=True,
                    )

    instance = queries.get_gitlab_instance()
    # instance exists in v1 saas files only
    desired_jenkins_instances = [
        s["instance"]["name"] for s in saas_files if s.get("instance")
    ]
    jenkins_map = jenkins_base.get_jenkins_map(
        desired_instances=desired_jenkins_instances
    )
    settings = queries.get_app_interface_settings()
    accounts = queries.get_aws_accounts()
    try:
        gl = GitLabApi(instance, settings=settings)
    except Exception:
        # allow execution without access to gitlab
        # as long as there are no access attempts.
        gl = None

    saasherder = SaasHerder(
        saas_files,
        thread_pool_size=thread_pool_size,
        gitlab=gl,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION,
        settings=settings,
        jenkins_map=jenkins_map,
        accounts=accounts,
    )
    if len(saasherder.namespaces) == 0:
        logging.warning("no targets found")
        sys.exit(ExitCodes.SUCCESS)

    ri, oc_map = ob.fetch_current_state(
        namespaces=saasherder.namespaces,
        thread_pool_size=thread_pool_size,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION,
        init_api_resources=True,
        cluster_admin=saasherder.cluster_admin,
    )
    defer(oc_map.cleanup)
    saasherder.populate_desired_state(ri)

    # validate that this deployment is valid
    # based on promotion information in targets
    if not saasherder.validate_promotions():
        logging.error("invalid promotions")
        ri.register_error()
        sys.exit(ExitCodes.ERROR)

    # if saas_file_name is defined, the integration
    # is being called from multiple running instances
    actions = ob.realize_data(
        dry_run,
        oc_map,
        ri,
        thread_pool_size,
        caller=saas_file_name,
        wait_for_namespace=True,
        no_dry_run_skip_compare=(not saasherder.compare),
        take_over=saasherder.take_over,
    )

    if not dry_run:
        if saasherder.publish_job_logs:
            try:
                ob.follow_logs(oc_map, actions, io_dir)
            except Exception as e:
                logging.error(str(e))
                ri.register_error()
        try:
            ob.validate_data(oc_map, actions)
        except Exception as e:
            logging.error(str(e))
            ri.register_error()

    # publish results of this deployment
    # based on promotion information in targets
    success = not ri.has_error_registered()
    # only publish promotions for deployment jobs (a single saas file)
    if notify:
        # Auto-promote next stages only if there are changes in the
        # promoting stage. This prevents trigger promotions on job re-runs
        auto_promote = len(actions) > 0
        mr_cli = mr_client_gateway.init(gitlab_project_id=gitlab_project_id)
        saasherder.publish_promotions(success, all_saas_files, mr_cli, auto_promote)

    if not success:
        sys.exit(ExitCodes.ERROR)

    # send human readable notifications to slack
    # we only do this if:
    # - this is not a dry run
    # - there is a single saas file deployed
    # - output is 'events'
    # - no errors were registered
    if notify and slack and actions and slack_info.get("output") == "events":
        for action in actions:
            message = (
                f"[{action['cluster']}] "
                + f"{action['kind']} {action['name']} {action['action']}"
            )
            slack.chat_post_message(message)
def setup(thread_pool_size, internal, use_jump_host, integration,
          integration_version, v1, v2):
    """Setup required resources for triggering integrations

    Args:
        thread_pool_size (int): Thread pool size to use
        internal (bool): Should run for internal/external/all clusters
        use_jump_host (bool): Should use jump host to reach clusters
        integration (string): Name of calling integration
        integration_version (string): Version of calling integration
        v1 (bool): Should trigger for v1 SaaS files
        v2 (bool): Should trigger for v2 SaaS files

    Returns:
        saasherder (SaasHerder): a SaasHerder instance
        jenkins_map (dict): Instance names with JenkinsApi instances
        oc_map (OC_Map): a dictionary of OC clients per cluster
        settings (dict): App-interface settings
        error (bool): True if one happened, False otherwise
    """
    saas_files = queries.get_saas_files(v1=v1, v2=v2)
    if not saas_files:
        logging.error("no saas files found")
        return None, None, None, None, True
    # only handle saas files assigned to this shard
    saas_files = [sf for sf in saas_files if is_in_shard(sf["name"])]

    # Remove saas-file targets that are disabled
    for saas_file in saas_files[:]:
        resource_templates = saas_file["resourceTemplates"]
        for rt in resource_templates[:]:
            targets = rt["targets"]
            for target in targets[:]:
                if target["disable"]:
                    targets.remove(target)

    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    accounts = queries.get_state_aws_accounts()
    gl = GitLabApi(instance, settings=settings)
    jenkins_map = jenkins_base.get_jenkins_map()
    pipelines_providers = queries.get_pipelines_providers()
    # OC clients are only needed for tekton pipelines provider namespaces
    tkn_provider_namespaces = [
        pp["namespace"] for pp in pipelines_providers if pp["provider"] == "tekton"
    ]

    oc_map = OC_Map(
        namespaces=tkn_provider_namespaces,
        integration=integration,
        settings=settings,
        internal=internal,
        use_jump_host=use_jump_host,
        thread_pool_size=thread_pool_size,
    )

    saasherder = SaasHerder(
        saas_files,
        thread_pool_size=thread_pool_size,
        gitlab=gl,
        integration=integration,
        integration_version=integration_version,
        settings=settings,
        jenkins_map=jenkins_map,
        accounts=accounts,
    )

    return saasherder, jenkins_map, oc_map, settings, False
def run(dry_run, thread_pool_size=10, io_dir='throughput/',
        saas_file_name=None, env_name=None, gitlab_project_id=None,
        defer=None):
    """Deploy saas-file targets to OpenShift (v1), publish promotion
    results and optional slack event notifications."""
    all_saas_files = queries.get_saas_files()
    saas_files = queries.get_saas_files(saas_file_name, env_name)
    if not saas_files:
        logging.error('no saas files found')
        sys.exit(ExitCodes.ERROR)

    instance = queries.get_gitlab_instance()
    desired_jenkins_instances = [s['instance']['name'] for s in saas_files]
    jenkins_map = jenkins_base.get_jenkins_map(
        desired_instances=desired_jenkins_instances)
    settings = queries.get_app_interface_settings()
    accounts = queries.get_aws_accounts()
    try:
        gl = GitLabApi(instance, settings=settings)
    except Exception:
        # allow execution without access to gitlab
        # as long as there are no access attempts.
        gl = None

    saasherder = SaasHerder(
        saas_files,
        thread_pool_size=thread_pool_size,
        gitlab=gl,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION,
        settings=settings,
        jenkins_map=jenkins_map,
        accounts=accounts)
    if len(saasherder.namespaces) == 0:
        logging.warning('no targets found')
        sys.exit(ExitCodes.SUCCESS)

    ri, oc_map = ob.fetch_current_state(
        namespaces=saasherder.namespaces,
        thread_pool_size=thread_pool_size,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION,
        init_api_resources=True)
    defer(lambda: oc_map.cleanup())
    saasherder.populate_desired_state(ri)

    # validate that this deployment is valid
    # based on promotion information in targets
    if not saasherder.validate_promotions():
        logging.error('invalid promotions')
        sys.exit(ExitCodes.ERROR)

    # if saas_file_name is defined, the integration
    # is being called from multiple running instances
    actions = ob.realize_data(
        dry_run, oc_map, ri,
        caller=saas_file_name,
        wait_for_namespace=True,
        no_dry_run_skip_compare=(not saasherder.compare),
        take_over=saasherder.take_over)

    if not dry_run:
        if saasherder.publish_job_logs:
            try:
                ob.follow_logs(oc_map, actions, io_dir)
            except Exception as e:
                logging.error(str(e))
                ri.register_error()
        try:
            ob.validate_data(oc_map, actions)
        except Exception as e:
            logging.error(str(e))
            ri.register_error()

    # publish results of this deployment
    # based on promotion information in targets
    success = not ri.has_error_registered()
    # only publish promotions for deployment jobs (a single saas file)
    if not dry_run and len(saasherder.saas_files) == 1:
        mr_cli = mr_client_gateway.init(gitlab_project_id=gitlab_project_id)
        saasherder.publish_promotions(success, all_saas_files, mr_cli)

    if not success:
        sys.exit(ExitCodes.ERROR)

    # send human readable notifications to slack
    # we only do this if:
    # - this is not a dry run
    # - there is a single saas file deployed
    # - output is 'events'
    # - no errors were registered
    if not dry_run and len(saasherder.saas_files) == 1:
        saas_file = saasherder.saas_files[0]
        slack_info = saas_file.get('slack')
        if slack_info and actions and slack_info.get('output') == 'events':
            slack = init_slack(slack_info, QONTRACT_INTEGRATION,
                               init_usergroups=False)
            for action in actions:
                message = \
                    f"[{action['cluster']}] " + \
                    f"{action['kind']} {action['name']} {action['action']}"
                slack.chat_post_message(message)