def test_vote_success_commitdistro(self, mock_api_report, mock_log_info,
                                   mock_log_debug, mock_log_error):
    dlrn_hash = DlrnCommitDistroHash(commit_hash='a', distro_hash='b',
                                     timestamp=1)
    params = copy.deepcopy(self.client.report_params)
    params.aggregate_hash = None
    params.commit_hash = dlrn_hash.commit_hash
    params.distro_hash = dlrn_hash.distro_hash
    params.job_id = 'job1'
    params.notes = None
    params.success = str(True)
    params.timestamp = dlrn_hash.timestamp
    params.url = "https://job.url"
    mock_api_report.return_value = True
    str_params = str(params).replace('\n', ' ').replace('\r', ' ')
    api_response = self.client.vote(dlrn_hash, params.job_id, params.url,
                                    params.success)
    mock_log_debug.assert_has_calls([
        mock.call('Dlrn voting success: %s for dlrn_hash %s', 'True',
                  dlrn_hash)
    ])
    mock_log_info.assert_has_calls([
        mock.call('Dlrn voting success: %s for job %s with parameters %s',
                  'True', 'job1', str_params),
        mock.call('Dlrn voted success: %s for job %s on hash %s', 'True',
                  'job1', dlrn_hash),
    ])
    self.assertTrue(api_response)
    self.assertFalse(mock_log_error.called)
def test_build_invalid_from_source(self):
    with self.assertRaises(DlrnHashError):
        source = hashes_test_cases['commitdistro']['dict']['invalid']
        DlrnCommitDistroHash(source=source)
    with self.assertRaises(DlrnHashError):
        source = hashes_test_cases['aggregate']['dict']['invalid']
        DlrnAggregateHash(source=source)
def setUp(self):
    super(TestPrepareExtraVars, self).setUp()
    self.client = self.promoter.registries_client
    self.dlrn_hash_commitdistro = DlrnCommitDistroHash(commit_hash='abc',
                                                       distro_hash='def',
                                                       component="comp1",
                                                       timestamp=1)
def test_promote_label_to_label_success(self, mock_civotes, mock_fetch_jobs,
                                        mock_promote, mock_select_candidates,
                                        mock_log_warning, mock_log_info,
                                        mock_log_error, mock_log_debug):
    ci_votes = "http://host.to/detailspage.html"
    candidate_hashes = [
        DlrnCommitDistroHash(commit_hash='a', distro_hash='b'),
        DlrnCommitDistroHash(commit_hash='c', distro_hash='c')
    ]
    required_set = {
        'periodic-tripleo-centos-7-master-containers-build-push',
        'periodic-tripleo-centos-7-master-standalone'
    }
    pair = (candidate_hashes[0], 'current-tripleo')
    mock_promote.return_value = pair
    mock_civotes.return_value = ci_votes
    mock_select_candidates.return_value = candidate_hashes
    mock_fetch_jobs.return_value = list(required_set)
    promoted_pair = self.promoter.promote_label_to_label(
        'tripleo-ci-testing', 'current-tripleo')
    mock_log_debug.assert_has_calls([
        mock.call("Candidate hash '%s': required jobs %s",
                  candidate_hashes[0], required_set)
    ])
    mock_log_info.assert_has_calls([
        mock.call("Candidate label '%s': %d candidates",
                  'tripleo-ci-testing', 2),
        mock.call(
            "Candidate label '%s': Checking candidates that meet "
            "promotion criteria for target label '%s'",
            'tripleo-ci-testing', 'current-tripleo'),
        mock.call("Candidate hash '%s' vote details page: %s",
                  candidate_hashes[0], ci_votes),
        mock.call("Candidate hash '%s': successful jobs %s",
                  candidate_hashes[0], required_set),
        mock.call(
            "Candidate hash '%s': criteria met, attempting promotion "
            "to %s", candidate_hashes[0], 'current-tripleo'),
    ])
    self.assertFalse(mock_log_warning.called)
    self.assertFalse(mock_log_error.called)
    self.assertEqual(promoted_pair, pair)
    # Ensure that we stop at the first promotion
    self.assertEqual(mock_promote.call_count, 1)
def test_get_civotes_info_commitdistro(self):
    dlrn_hash = DlrnCommitDistroHash(commit_hash='a', distro_hash='b')
    get_detail = self.client.get_civotes_info(dlrn_hash)
    urlparse(get_detail)
    detail = ("Check results at: "
              "http://api.url/api/civotes_detail.html?commit_hash=a"
              "&distro_hash=b")
    self.assertEqual(get_detail, detail)
def test_vote_empty_api_response(self, mock_api_report, mock_log_error):
    mock_api_report.return_value = []
    dlrn_hash = DlrnCommitDistroHash(commit_hash='a', distro_hash='b',
                                     timestamp=1)
    with self.assertRaises(PromotionError):
        self.client.vote(dlrn_hash, 'job_id', 'url', True)
    mock_log_error.assert_has_calls([
        mock.call('Dlrn voting success: %s for dlrn_hash %s: API vote '
                  'response is empty', str(True), dlrn_hash),
        mock.call("------- -------- Promoter aborted")
    ])
def test_fetch_jobs_no_jobs(self, api_repo_status_get_mock, mock_log_debug,
                            mock_log_error):
    api_repo_status_get_mock.return_value = []
    dlrn_hash = DlrnCommitDistroHash(commit_hash='a', distro_hash='b')
    job_list = self.client.fetch_jobs(dlrn_hash)
    self.assertEqual(len(job_list), 0)
    self.assertEqual(job_list, [])
    mock_log_debug.assert_has_calls([
        mock.call("Hash '%s': fetching list of successful jobs", dlrn_hash),
        mock.call("No successful jobs for hash %s", dlrn_hash)
    ])
    self.assertFalse(mock_log_error.called)
def test_fetch_jobs_api_error(self, api_repo_status_get_mock, mock_log_error):
    api_repo_status_get_mock.side_effect = self.api_exception
    dlrn_hash = DlrnCommitDistroHash(commit_hash='a', distro_hash='b')
    with self.assertRaises(ApiException):
        self.client.fetch_jobs(dlrn_hash)
    mock_log_error.assert_has_calls([
        mock.call("Exception while fetching jobs from API endpoint (%s) "
                  "%s: %s", self.api_exception.status,
                  self.api_exception.reason,
                  self.api_exception.message),
        mock.call("------- -------- Promoter aborted")
    ])
def test_promote_check_named_hashes_failed(self, mock_qcow_client,
                                           mock_registries_client,
                                           mock_dlrn_client,
                                           mock_check_named_hashes,
                                           mock_log_debug):
    candidate_hash = DlrnCommitDistroHash(commit_hash='a', distro_hash='b')
    mock_check_named_hashes.side_effect = HashChangedError
    with self.assertRaises(HashChangedError):
        self.promoter.promote(candidate_hash, 'tripleo-ci-testing',
                              'current-tripleo')
    self.assertFalse(mock_registries_client.called)
    self.assertFalse(mock_qcow_client.called)
    self.assertFalse(mock_dlrn_client.called)
def test_vote_api_error(self, mock_api_report, mock_log_error):
    mock_api_report.side_effect = self.api_exception
    dlrn_hash = DlrnCommitDistroHash(commit_hash='a', distro_hash='b',
                                     timestamp=1)
    with self.assertRaises(ApiException):
        self.client.vote(dlrn_hash, 'job_id', 'url', True)
    mock_log_error.assert_has_calls([
        mock.call('Dlrn voting success: %s for dlrn_hash %s: Error during '
                  'voting through API: (%s) %s: %s', str(True), dlrn_hash,
                  self.api_exception.status, self.api_exception.reason,
                  self.api_exception.message),
        mock.call("------- -------- Promoter aborted")
    ])
def create_commit_hierarchy(self, commit):
    """
    Every single pipeline commit or component commit has a hierarchy of
    files inside the DLRN repo. This sets up template substitution
    variables and calls the method that creates all the additional files
    in the hierarchy for the commit
    :return: the location of the commit repo
    """
    # Complete creation of repo structure
    # The logic here is quite hard
    # We are trying to simulate the step immediately before the promotion
    # For the single pipeline, the database provides enough information
    # For the component pipeline, many things are done by the dlrn build
    # process, and the database information is incomplete. For example,
    # there is no way to associate a commit to a component, and no way to
    # retrieve the list of commits that promoted to a specific name; the
    # aggregate promotion refers only to the last commit.
    # The api gives a bit more information, but the staging environment
    # creation comes before any api activation
    # To simplify test procedures we are going to pass two different
    # fixture files with db values specific to the case
    # One will be used for the single pipeline
    # The other will be used for the component pipeline
    dlrn_hash = DlrnCommitDistroHash(source=commit)
    subst_dict = {
        'distro': self.distro,
        'repo_root_files': self.repo_root_files,
    }
    # sets template subst variables
    if self.components_mode:
        repo_name = "-component-{}".format(commit['component'])
    else:
        repo_name = ""
    subst_dict['commit_dir'] = dlrn_hash.commit_dir
    subst_dict['repo_name'] = repo_name
    abs_commit_dir = os.path.join(self.repo_root_files,
                                  dlrn_hash.commit_dir)
    try:
        os.makedirs(abs_commit_dir)
    except OSError:
        pass
    self.create_additional_files(commit, abs_commit_dir, subst_dict)
    return self.repo_root_files
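# A minimal usage sketch for create_commit_hierarchy (hypothetical values;
# 'stage' stands for an instance of this staging class, and the exact
# directory layout is whatever DlrnCommitDistroHash.commit_dir computes):
#
#     commit = {'commit_hash': 'a' * 40, 'distro_hash': 'b' * 40,
#               'timestamp': 1, 'component': 'tripleo'}
#     repo_root = stage.create_commit_hierarchy(commit)
#     # repo_root/<commit_dir>/ now exists and is populated by
#     # create_additional_files() using the template substitution dict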
def test_promote_label_to_label_empty_promoted_pair(
        self, mock_civotes, mock_fetch_jobs, mock_promote,
        mock_select_candidates, mock_log_warning, mock_log_error):
    ci_votes = "http://host.to/detailspage.html"
    candidate_hashes = [
        DlrnCommitDistroHash(commit_hash='a', distro_hash='b'),
        DlrnCommitDistroHash(commit_hash='c', distro_hash='c')
    ]
    required_set = {
        'periodic-tripleo-centos-7-master-containers-build-push',
        'periodic-tripleo-centos-7-master-standalone'
    }
    mock_promote.side_effect = [(), ()]
    mock_civotes.return_value = ci_votes
    mock_select_candidates.return_value = candidate_hashes
    mock_fetch_jobs.return_value = list(required_set)
    promoted_pair = self.promoter.promote_label_to_label(
        'tripleo-ci-testing', 'current-tripleo')
    self.assertFalse(mock_log_warning.called)
    self.assertFalse(mock_log_error.called)
    # Ensure that we try two promotions
    self.assertEqual(mock_promote.call_count, 2)
    self.assertEqual(promoted_pair, ())
def test_build_valid_from_source(self):
    for hash_type, source_types in hashes_test_cases.items():
        values = source_types['dict']['valid']
        if hash_type == "commitdistro":
            dh = DlrnCommitDistroHash(source=values)
            self.assertEqual(dh.commit_hash,
                             source_types['dict']['valid']['commit_hash'])
            self.assertEqual(dh.distro_hash,
                             source_types['dict']['valid']['distro_hash'])
        elif hash_type == "aggregate":
            aggregate_hash = source_types['dict']['valid'][
                'aggregate_hash']
            dh = DlrnAggregateHash(source=values)
            self.assertEqual(dh.aggregate_hash, aggregate_hash)
        self.assertEqual(dh.timestamp,
                         source_types['dict']['valid']['timestamp'])
def test_promote_label_to_label_missing_jobs(self, mock_civotes,
                                             mock_fetch_jobs, mock_promote,
                                             mock_select_candidates,
                                             mock_log_warning, mock_log_info,
                                             mock_log_error, mock_log_debug):
    ci_votes = "http://host.to/detailspage.html"
    mock_civotes.return_value = ci_votes
    required_set = {
        'periodic-tripleo-centos-7-master-containers-build-push',
        'periodic-tripleo-centos-7-master-standalone'
    }
    successful_jobs = [
        'periodic-tripleo-centos-7-master-containers-build-push',
    ]
    missing_jobs = ['periodic-tripleo-centos-7-master-standalone']
    mock_fetch_jobs.return_value = successful_jobs
    candidate_hash = DlrnCommitDistroHash(commit_hash='a', distro_hash='b')
    mock_select_candidates.return_value = [candidate_hash]
    promoted_pair = self.promoter.promote_label_to_label(
        'tripleo-ci-testing', 'current-tripleo')
    mock_log_debug.assert_has_calls([
        mock.call("Candidate hash '%s': required jobs %s", candidate_hash,
                  required_set)
    ])
    mock_log_warning.assert_has_calls([
        mock.call("Candidate hash '%s': missing jobs %s", candidate_hash,
                  missing_jobs),
        mock.call(
            "Candidate hash '%s': criteria NOT met for promotion to "
            "%s", candidate_hash, 'current-tripleo'),
    ])
    mock_log_info.assert_has_calls([
        mock.call("Candidate label '%s': %d candidates",
                  'tripleo-ci-testing', 1),
        mock.call(
            "Candidate label '%s': Checking candidates that meet "
            "promotion criteria for target label '%s'",
            'tripleo-ci-testing', 'current-tripleo'),
        mock.call("Candidate hash '%s' vote details page: %s",
                  candidate_hash, ci_votes),
        mock.call("Candidate hash '%s': successful jobs %s", candidate_hash,
                  set(successful_jobs))
    ])
    self.assertFalse(mock_promote.called)
    self.assertFalse(mock_log_error.called)
    self.assertEqual(promoted_pair, ())
def test_promote_label_to_label_missing_jobs_no_successful(
        self, mock_civotes, mock_fetch_jobs, mock_promote,
        mock_select_candidates, mock_log_warning, mock_log_error):
    ci_votes = "http://host.to/detailspage.html"
    mock_civotes.return_value = ci_votes
    successful_jobs = []
    mock_fetch_jobs.return_value = successful_jobs
    candidate_hash = DlrnCommitDistroHash(commit_hash='a', distro_hash='b')
    mock_select_candidates.return_value = [candidate_hash]
    promoted_pair = self.promoter.promote_label_to_label(
        'tripleo-ci-testing', 'current-tripleo')
    mock_log_warning.assert_has_calls([
        mock.call("Candidate hash '%s': NO successful jobs",
                  candidate_hash),
    ])
    self.assertFalse(mock_promote.called)
    self.assertFalse(mock_log_error.called)
    self.assertEqual(promoted_pair, ())
def test_force_promote_success(self, single_promote_mock,
                               start_process_mock, init_mock,
                               legacy_config_mock):
    candidate_hash = DlrnCommitDistroHash(commit_hash="a", distro_hash="b")
    cmd_line = ("--config-file config.ini force-promote "
                "--commit-hash a "
                "--distro-hash b "
                "tripleo-ci-testing "
                "current-tripleo")
    promoter_main(cmd_line=cmd_line)
    self.assertTrue(init_mock.called)
    self.assertFalse(start_process_mock.called)
    single_promote_mock.assert_has_calls([
        mock.call(mock.ANY, candidate_hash, 'tripleo-ci-testing',
                  'current-tripleo')
    ])
def test_promote_only_dlrn_client_allowed(self, mock_qcow_client,
                                          mock_registries_client,
                                          mock_dlrn_client,
                                          mock_check_named_hashes,
                                          mock_log_debug):
    candidate_hash = DlrnCommitDistroHash(commit_hash='a', distro_hash='b')
    mock_check_named_hashes.return_value = None
    self.promoter.promote(candidate_hash, 'tripleo-ci-testing',
                          'current-tripleo',
                          allowed_clients=['dlrn_client'])
    mock_log_debug.assert_has_calls([
        mock.call("Candidate hash '%s': clients allowed to promote: %s",
                  candidate_hash, 'dlrn_client'),
    ])
    self.assertFalse(mock_registries_client.called)
    self.assertFalse(mock_qcow_client.called)
    self.assertTrue(mock_dlrn_client.called)
def promote_aggregate(self, commit, promotion_alias):
    """
    In the integration pipeline, we are not going to simulate the creation
    of the aggregate hash repo subtree with templates. That would mean
    replicating a lot of DLRN server code. So we use the server itself,
    pointing it to the right top dir of the subtree and feeding it with
    component promotions
    :param commit: A dict with the commit information
    :param promotion_alias: The alias in the promotions map to get to the
     actual target_label
    :return: None
    """
    dlrn_hash = DlrnCommitDistroHash(source=commit)
    self.client.promote(dlrn_hash, commit['name'], create_previous=False)
    promotion_hash = self.client.fetch_promotions_from_hash(dlrn_hash,
                                                            count=1)
    # For aggregates, the promotion map we need contains promotion hashes,
    # not the commit hashes, so we overwrite the existing map
    # and add the promotion alias to the commit
    if promotion_alias is not None:
        promotion_dict = promotion_hash.dump_to_dict()
        promotion_dict['name'] = commit['name']
        promotion_dict['full_hash'] = promotion_hash.full_hash
        self.promotions[promotion_alias] = promotion_dict
        self.rev_promotions[commit['id']] = promotion_alias
    # Since the aggregate hashes are dynamically created, the ci
    # votes also need to be dynamically generated
    # We vote for a single job for every aggregate
    self.client.vote(promotion_hash, "staging-job-1", "http://nowhe.re",
                     True)
    # Then we also vote for the second job in the criteria for the
    # promotion candidate, so it will have all the required votes.
    if promotion_alias == 'promotion_candidate':
        self.client.vote(promotion_hash, "staging-job-2",
                         "http://nowhe.re", True)
    # If we promote too fast, it may happen that two hashes have the same
    # timestamp, which would break the simulation of a normal sequence of
    # builds
    # Wait 1 second to be sure that the next promotion has a different
    # timestamp
    time.sleep(1)
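# Hedged sketch of the promotions map entry built above (key names taken
# from the code; values are illustrative only). After promote_aggregate()
# runs with promotion_alias='promotion_candidate', self.promotions holds
# something shaped like
#
#     {'promotion_candidate': {'name': commit['name'],
#                              'full_hash': '<aggregate full_hash>',
#                              ...other keys from dump_to_dict()...}}
#
# self.rev_promotions maps commit['id'] back to 'promotion_candidate', and
# that aggregate carries votes for both staging-job-1 and staging-job-2.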
def get_hash_from_component(self, log_header, component_name, base_url):
    """
    Downloads the commit.yaml file relative to the component, and creates
    a hash with all the information in it
    :param log_header: the header for all logging messages
    :param component_name: the name of the component
    :param base_url: The base url taken from the component in the main
     delorean.repo file
    :return: A DlrnCommitDistroHash containing the information of the
     promoted component
    """
    self.log.debug("%s base url for component %s at %s", log_header,
                   component_name, base_url)
    commit_url = "{}/{}".format(base_url, "commit.yaml")
    self.log.debug("%s commit info url for component %s at %s", log_header,
                   component_name, commit_url)
    try:
        # FIXME: in python2 urlopen is not a context manager
        with contextlib.closing(url.urlopen(commit_url)) as commits_yaml:
            commits = yaml.safe_load(commits_yaml.read().decode("UTF-8"))
    # FIXME(gcerami) it is very difficult to make urlopen generate
    # url.HTTPError (without mocking side effect directly), so this part
    # is only partially tested
    except (url.HTTPError, url.URLError):
        self.log.error(
            "Dlrn Promote: Error downloading component yaml info"
            " at %s", commit_url)
        self.log.error("------- -------- Promoter aborted")
        raise PromotionError("Unable to fetch commits from component url")
    # AP step4: from commits.yaml extract commit/distro_hash to
    # promote and create a Hash object
    promotion_info = commits['commits'][0]
    promotion_info['timestamp'] = promotion_info['dt_commit']
    self.log.debug("%s component '%s' commit info: %s", log_header,
                   component_name, promotion_info)
    # AP step5: add hashes to promotion list
    promotion_hash = DlrnCommitDistroHash(source=promotion_info)
    self.log.debug("%s adding '%s' to promotion list", log_header,
                   promotion_hash)
    return promotion_hash
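# Hedged sketch of the commit.yaml payload this method expects (key names
# taken from the code above, values made up): only the first entry under
# 'commits' is used, and its 'dt_commit' becomes the hash timestamp.
#
#     commits:
#       - commit_hash: 1234567890abcdef1234567890abcdef12345678
#         distro_hash: abcdef1234567890abcdef1234567890abcdef12
#         dt_commit: 1583519484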
def test_fetch_jobs_success(self, api_repo_status_get_mock,
                            api_agg_status_get_mock, mock_log_debug,
                            mock_log_error):
    api_repo_status_get_mock.return_value = self.api_jobs
    dlrn_hash = DlrnCommitDistroHash(commit_hash='a', distro_hash='b')
    job_list = self.client.fetch_jobs(dlrn_hash)
    self.assertEqual(len(job_list), 2)
    self.assertEqual(job_list, ["job0", "job1"])
    # Works locally but not in upstream, debugging by commenting
    mock_log_debug.assert_has_calls([
        mock.call("Hash '%s': fetching list of successful jobs", dlrn_hash),
        # mock.call("Fetched %d successful jobs for hash %s", 2, dlrn_hash),
        # mock.call("%s passed on %s, logs at '%s'", 'job0',
        #           '1970-01-01T01:00:00', 'https://dev/null'),
        # mock.call("%s passed on %s, logs at '%s'", 'job1',
        #           '1970-01-01T01:00:01', 'https://dev/null')
    ])
    self.assertFalse(api_agg_status_get_mock.called)
    self.assertTrue(api_repo_status_get_mock.called)
    self.assertFalse(mock_log_error.called)
def staged_promotion(self, commit):
    """
    Creates symlinks to simulate a dlrn promotion in the repository
    Valid only for the single pipeline or component commit promotion.
    :param commit: A dict with info of the commit to promote
    :return: None
    """
    target_label = commit['name']
    dlrn_hash = DlrnCommitDistroHash(source=commit)
    link_path = os.path.join(self.repo_root_files, target_label)
    try:
        os.unlink(link_path)
    except OSError:
        pass
    try:
        os.symlink(dlrn_hash.commit_dir, link_path)
    except OSError:
        self.log.error(
            "Staged promotion for single pipeline commit: "
            "Unable to promote dlrn hash '%s' to %s"
            "", dlrn_hash, target_label)
        raise
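# Usage sketch for staged_promotion (hypothetical commit dict; 'stage' is an
# instance of this staging class): the target label ends up as a symlink
# pointing at the commit's repo directory.
#
#     commit = {'commit_hash': 'a' * 40, 'distro_hash': 'b' * 40,
#               'timestamp': 1, 'name': 'tripleo-ci-testing'}
#     stage.staged_promotion(commit)
#     # repo_root_files/tripleo-ci-testing -> <dlrn_hash.commit_dir>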
def test_promote_success(self, mock_qcow_client, mock_registries_client,
                         mock_dlrn_client, mock_check_named_hashes,
                         mock_log_info, mock_log_error, mock_log_debug,
                         mock_log_warning):
    candidate_hash = DlrnCommitDistroHash(commit_hash='a', distro_hash='b')
    mock_check_named_hashes.return_value = None
    # The order here is VERY important, and we MUST ensure it's respected
    allowed_clients = ['registries_client', 'qcow_client', 'dlrn_client']
    mock_dlrn_client.return_value = None
    mock_qcow_client.return_value = None
    mock_registries_client.return_value = None
    promoted_pair = \
        self.promoter.promote(candidate_hash, 'tripleo-ci-testing',
                              'current-tripleo')
    mock_log_debug.assert_has_calls([
        mock.call("Candidate hash '%s': clients allowed to promote: %s",
                  candidate_hash, ', '.join(allowed_clients)),
        mock.call("Candidate hash '%s': client %s SUCCESSFUL promotion",
                  candidate_hash, allowed_clients[0]),
        mock.call("Candidate hash '%s': client %s SUCCESSFUL promotion",
                  candidate_hash, allowed_clients[1]),
        mock.call("Candidate hash '%s': client %s SUCCESSFUL promotion",
                  candidate_hash, allowed_clients[2]),
    ])
    mock_log_info.assert_has_calls([
        mock.call("Candidate hash '%s': attempting promotion",
                  candidate_hash),
        mock.call("Candidate hash '%s': SUCCESSFUL promotion to %s",
                  candidate_hash, 'current-tripleo'),
    ])
    self.assertFalse(mock_log_warning.called)
    self.assertFalse(mock_log_error.called)
    self.assertTrue(mock_check_named_hashes.called)
    self.assertTrue(mock_registries_client.called)
    self.assertTrue(mock_qcow_client.called)
    self.assertTrue(mock_dlrn_client.called)
    self.assertEqual(promoted_pair, (candidate_hash, 'current-tripleo'))
def test_promote_failure(self, mock_qcow_client, mock_registries_client,
                         mock_dlrn_client, mock_check_named_hashes,
                         mock_log_error, mock_log_exception):
    mock_dlrn_client.return_value = None
    mock_qcow_client.side_effect = PromotionError
    mock_registries_client.return_value = None
    mock_check_named_hashes.return_value = None
    candidate_hash = DlrnCommitDistroHash(commit_hash='a', distro_hash='b')
    with self.assertRaises(PromotionError):
        self.promoter.promote(candidate_hash, 'tripleo-ci-testing',
                              'current-tripleo')
    mock_log_error.assert_has_calls([
        mock.call(
            "Candidate hash '%s': client %s FAILED promotion attempt "
            "to %s"
            "", candidate_hash, 'qcow_client', 'current-tripleo')
    ])
    self.assertTrue(mock_log_exception.called)
    self.assertTrue(mock_registries_client.called)
    self.assertTrue(mock_qcow_client.called)
    self.assertFalse(mock_dlrn_client.called)
def expand_dlrn_config(dlrn_config):
    """
    Called by StageConfig.expand_config to expand the dlrn configuration
    part
    :param dlrn_config: The config dict
    :return: the expanded config dict, with dlrn information
    """
    dlrn_config['promotions'] = {}
    db_commits = dlrn_config['server']['db_data']['commits']
    # Every third commit in the group of commits will be the last to
    # promote that name, and so it will be the one tied to the aggregate
    # hash that we'll have to promote
    promotions_map = dlrn_config['server']['db_data']['promotions_map']
    # Expand db commit information with associated promotions and
    # full_hashes, and create the promotions map
    commits = []
    for index, db_commit in enumerate(db_commits):
        commit = copy.deepcopy(db_commit)
        promotion_name, promotion_alias = \
            promotions_map.get(index, (None, None))
        if promotion_name is not None:
            commit['name'] = promotion_name
            commit['full_hash'] = DlrnCommitDistroHash(
                source=commit).full_hash
        if promotion_alias is not None:
            dlrn_config['promotions'][promotion_alias] = commit
        commits.append(commit)
    dlrn_config['commits'] = commits
    # Create reverse promotion map from promotions map
    dlrn_config['rev_promotions'] = {}
    for promotion_alias, commit in dlrn_config['promotions'].items():
        dlrn_config['rev_promotions'][commit['id']] = promotion_alias
    return dlrn_config
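# A minimal sketch of the expansion performed by expand_dlrn_config
# (hypothetical fixture data; the alias name and hash values are made up,
# and only the keys the function reads are shown):
#
#     dlrn_config = {'server': {'db_data': {
#         'commits': [{'id': 1, 'commit_hash': 'a' * 40,
#                      'distro_hash': 'b' * 40, 'timestamp': 1}],
#         'promotions_map': {0: ('tripleo-ci-testing', 'latest_promotion')},
#     }}}
#     expanded = expand_dlrn_config(dlrn_config)
#     # expanded['promotions']['latest_promotion']['name'] is
#     # 'tripleo-ci-testing', and expanded['rev_promotions'][1] is
#     # 'latest_promotion'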
def setUp(self):
    setup_logging("promoter", logging.DEBUG)
    self.config = DlrnClientConfig(dlrnauth_username='******',
                                   dlrnauth_password='******',
                                   api_url="http://api.url",
                                   repo_url="file:///tmp")
    self.config.promotion_steps_map = {
        'current-tripleo': 'tripleo-ci-testing'
    }
    self.client = DlrnClient(self.config)

    # Set up a fake job list with two different jobs
    self.api_jobs = []
    for idx in range(2):
        api_job = Mock()
        api_job.job_id = "job{}".format(idx)
        api_job.timestamp = idx
        api_job.url = "https://dev/null"
        self.api_jobs.append(api_job)

    # Set up ApiException for api calls
    self.api_exception = ApiException()
    self.api_exception.body = '{"message": "message"}'
    self.api_exception.message = "message"
    self.api_exception.status = 404
    self.api_exception.reason = "Not found"

    # Set up some ready to use hashes
    self.dlrn_hash_commitdistro1 = DlrnCommitDistroHash(commit_hash='a',
                                                        distro_hash='b',
                                                        component="comp1",
                                                        timestamp=1)
    self.dlrn_hash_commitdistro2 = DlrnCommitDistroHash(commit_hash='c',
                                                        distro_hash='d',
                                                        component="comp2",
                                                        timestamp=2)
    self.dlrn_hash_aggregate = DlrnAggregateHash(commit_hash='abc',
                                                 distro_hash='def',
                                                 aggregate_hash='ghjk',
                                                 timestamp=1)
    self.promote_log_header = ("Dlrn promote '{}' from {} to {}:"
                               "".format(self.dlrn_hash_commitdistro1,
                                         'tripleo-ci-testing',
                                         'current-tripleo'))

    # Set up the matrix of api_hashes to test
    commitdistrohash_valid_attrs = ['commit_hash', 'distro_hash',
                                    'timestamp']
    aggregatehash_valid_attrs = ['aggregate_hash', 'timestamp']

    # Create a commitdistro hash list
    self.api_hashes_commitdistro_ordered = []
    for idx in range(3):
        api_hash = Mock(spec=commitdistrohash_valid_attrs)
        api_hash.commit_hash = "a{}".format(idx)
        api_hash.distro_hash = "b{}".format(idx)
        api_hash.timestamp = idx
        self.api_hashes_commitdistro_ordered.append(api_hash)
    # Create a list with a duplicate by appending the last element of the
    # loop again
    api_hashes_commitdistro_ordered_with_duplicates = \
        copy.deepcopy(self.api_hashes_commitdistro_ordered)
    api_hashes_commitdistro_ordered_with_duplicates.append(api_hash)

    # Create an aggregate hash list
    self.api_hashes_aggregate_ordered = []
    for idx in range(3):
        api_hash = Mock(spec=aggregatehash_valid_attrs)
        api_hash.aggregate_hash = "a{}".format(idx)
        api_hash.commit_hash = "b{}".format(idx)
        api_hash.distro_hash = "c{}".format(idx)
        api_hash.timestamp = idx
        self.api_hashes_aggregate_ordered.append(api_hash)
    # Create a list with a duplicate by appending the last element of the
    # loop again
    api_hashes_aggregate_ordered_with_duplicates = \
        copy.deepcopy(self.api_hashes_aggregate_ordered)
    api_hashes_aggregate_ordered_with_duplicates.append(api_hash)

    # Create unordered lists by moving the first element to the end
    # # CommitDistro
    api_hashes_commitdistro_unordered = \
        copy.deepcopy(self.api_hashes_commitdistro_ordered)
    api_hash = api_hashes_commitdistro_unordered.pop(0)
    api_hashes_commitdistro_unordered.append(api_hash)
    # # Aggregate
    api_hashes_aggregate_unordered = \
        copy.deepcopy(self.api_hashes_aggregate_ordered)
    api_hash = api_hashes_aggregate_unordered.pop(0)
    api_hashes_aggregate_unordered.append(api_hash)

    self.api_hashes_all_types_ordered = [
        self.api_hashes_commitdistro_ordered,
        self.api_hashes_aggregate_ordered,
    ]
    self.api_hashes_all_types_unordered = [
        api_hashes_commitdistro_unordered,
        api_hashes_aggregate_unordered,
    ]
    self.api_hashes_all_types_with_duplicates = [
        api_hashes_commitdistro_ordered_with_duplicates,
        api_hashes_aggregate_ordered_with_duplicates,
    ]
def setUp(self):
    self.dlrn_hash_commitdistro = DlrnCommitDistroHash(commit_hash='abc',
                                                       distro_hash='def',
                                                       component="comp1",
                                                       timestamp=1)
    self.dlrn_hash_commitdistro2 = DlrnCommitDistroHash(commit_hash='ghj',
                                                        distro_hash='klm',
                                                        component="comp2",
                                                        timestamp=2)
    self.dlrn_hash_aggregate = DlrnAggregateHash(commit_hash='abc',
                                                 distro_hash='def',
                                                 aggregate_hash='ghjk',
                                                 timestamp=1)
    self.hashes = [self.dlrn_hash_commitdistro, self.dlrn_hash_aggregate]
    self.temp_dir = tempfile.mkdtemp()
    self.versions_csv_dir = self.temp_dir
    config_defaults = PromoterLegacyConfigBase.defaults
    repo_url = "file://{}/".format(self.temp_dir)
    containers_list_base_url = "file://{}".format(self.temp_dir)
    containers_list_exclude_config_path = os.path.join(
        self.temp_dir, "exclude_file.yaml")
    config = type(
        "Config", (), {
            'repo_url': repo_url,
            'release': 'master',
            'containers_list_base_url': containers_list_base_url,
            'containers_list_path':
                config_defaults['containers_list_path'],
            'containers': {
                'build_method': 'tripleo',
                'container_preffix': config_defaults['container_preffix'],
                'containers_list_exclude_config':
                    "file://{}".format(containers_list_exclude_config_path),
            }
        })
    self.client = RepoClient(config)
    fieldnames = ("Project,Source Repo,Source Sha,Dist Repo,Dist Sha,"
                  "Status,Last Success Timestamp,Component,Pkg NVR"
                  "").split(',')
    self.versions_csv_rows = [{
        'Project': "python-tripleo-common-tests-tempest",
        'Source Repo': "https://git.openstack.org/openstack/tripleo-common"
                       "-tempest-plugin",
        'Source Sha': "f08b321392930b4255310b5aca8f704a32a79132",
        'Dist Repo': "https://github.com/rdo-packages/tripleo-common-tempest"
                     "-plugin-distgit-git",
        'Dist Sha': "7ae014d193ad00ddb5007431665a0b3347c2c94b",
        "Status": "SUCCESS",
        "Last Success Timestamp": "1580861715",
        "Component": "tripleo",
        "Pkg NVR": "python-tripleo-common-tests-tempest-0.0.1-0.2020020500"
                   "1526.f08b321.el8"
    }, {
        'Project': 'openstack-tripleo-common',
        'Source Repo': "https://git.openstack.org/openstack/tripleo-common",
        'Source Sha': "163d4b3b4b211358512fa9ee7f49d9fb930ecd8f",
        'Dist Repo': "https://github.com/rdo-packages/tripleo-common-distgit"
                     "-git",
        'Dist Sha': "22ed466781937e0506ad4afae0427338820c5601",
        "Status": "SUCCESS",
        "Last Success Timestamp": "1583519484",
        "Component": "tripleo",
        "Pkg NVR": "openstack-tripleo-common-12.1.1-0.20200306183249"
                   ".163d4b3"
                   ".el8"
    }]

    # Create containers files
    containers_file_dirname = os.path.dirname(
        config_defaults['containers_list_path'])
    containers_dir = os.path.join(self.temp_dir,
                                  self.versions_csv_rows[1]['Source Sha'],
                                  containers_file_dirname)

    # containers names coming from tripleo yaml file
    tripleo_containers_list = """
container_images:
- image_source: tripleo
  imagename: quay.io/tripleomaster/openstack-base:current-tripleo
- image_source: tripleo
  imagename: quay.io/tripleomaster/openstack-os:current-tripleo
- image_source: tripleo
  imagename: quay.io/tripleomaster/openstack-aodh-base:current-tripleo
"""
    os.makedirs(containers_dir)
    containers_file_path = \
        os.path.join(containers_dir,
                     os.path.basename(config_defaults[
                         'containers_list_path']))
    with open(containers_file_path, "w") as containers_file:
        containers_file.write(tripleo_containers_list)

    # containers names coming from overcloud_containers.yaml file
    overcloud_containers_list = """
container_images:
- image_source: kolla
  imagename: quay.io/tripleotrain/centos-binary-nova-api:current-tripleo
- image_source: kolla
  imagename: quay.io/tripleotrain/centos-binary-neutron-server:current-tripleo
- image_source: kolla
  imagename: quay.io/tripleotrain/centos-binary-excluded:current-tripleo
- image_source: kolla
  imagename: quay.io/tripleotrain/centos-binary-ovn-controller:current-tripleo
"""
    overcloud_containers_file_path = \
        os.path.join(containers_dir, 'overcloud_containers.yaml')
    with open(overcloud_containers_file_path, "w") as containers_file:
        containers_file.write(overcloud_containers_list)

    # containers names coming from yaml file for ussuri release
    tripleo_containers_list = """
container_images:
- image_source: tripleo
  imagename: quay.io/tripleou/centos-binary-base:current-tripleo
- image_source: tripleo
  imagename: quay.io/tripleou/centos-binary-os:current-tripleo
- image_source: tripleo
  imagename: quay.io/tripleou/centos-binary-aodh-base:current-tripleo
"""
    tripleo_containers_file_path = \
        os.path.join(containers_dir, 'ussuri_containers.yaml')
    with open(tripleo_containers_file_path, "w") as containers_file:
        containers_file.write(tripleo_containers_list)

    # containers names coming from yaml file for queens release
    tripleo_containers_list = """
container_images:
- imagename: quay.io/tripleoqueens/centos-binary-base:current-tripleo
- imagename: quay.io/tripleoqueens/centos-binary-os:current-tripleo
- imagename: quay.io/tripleoqueens/centos-binary-aodh-base:current-tripleo
"""
    overcloud_containers_file_path = \
        os.path.join(containers_dir, 'queens_containers.yaml')
    with open(overcloud_containers_file_path, "w") as containers_file:
        containers_file.write(tripleo_containers_list)

    # create exclude config
    excluded_containers = ['nonexisting', 'excluded']
    exclude_config = {
        'exclude_containers': {
            'master': excluded_containers,
            'train': excluded_containers,
        },
    }
    with open(containers_list_exclude_config_path, "w") as exclude_file:
        exclude_file.write(yaml.safe_dump(exclude_config))

    # Create an empty containers file
    empty_containers_dir = os.path.join(self.temp_dir, "abc",
                                        containers_file_dirname)
    os.makedirs(empty_containers_dir)
    empty_containers_file_path = \
        os.path.join(empty_containers_dir,
                     os.path.basename(config_defaults[
                         'containers_list_path']))
    with open(empty_containers_file_path, "w") as containers_file:
        pass

    # Create versions.csv files
    for dlrn_hash in self.hashes:
        dlrn_hash.label = "tripleo-ci-testing"
        versions_csv_dir = os.path.join(self.temp_dir,
                                        dlrn_hash.commit_dir)
        os.makedirs(versions_csv_dir)
        versions_csv_path = os.path.join(versions_csv_dir, "versions.csv")
        with open(versions_csv_path, "w") as versions_csv_file:
            csv_writer = csv.DictWriter(versions_csv_file,
                                        fieldnames=fieldnames)
            csv_writer.writeheader()
            for row in self.versions_csv_rows:
                csv_writer.writerow(row)