def test_commit_dir_component(self):
    """Check that commit_dir includes the component path segment.

    Only commitdistro hashes carry a component; other hash types are
    skipped by this test.
    """
    for hash_type, source_types in hashes_test_cases.items():
        dlrn_hash = DlrnHash(source=source_types['object']['valid'])
        if hash_type != "commitdistro":
            continue
        dlrn_hash.component = "component1"
        self.assertEqual(dlrn_hash.commit_dir,
                         "component/component1/ab/cd/abcd_defg")
def compare_tagged_image_hash(stage_info=None, **kwargs):
    """ Ensure that the promotion target images directory is a soft link to
    the promoted full hash images directory.
    :param stage_info: a dictionary containing parameter of the staging env
    :param kwargs: additional parameter for non-staged executions
    :return: None
    """
    if stage_info is not None:
        # We are checking a stage
        distro_name = stage_info['main']['distro_name']
        distro_version = stage_info['main']['distro_version']
        distro = "{}{}".format(distro_name, distro_version)
        release = stage_info['main']['release']
        target_label = stage_info['dlrn']['promotion_target']
        images_top_root = stage_info['overcloud_images']['root']
        images_top_root = images_top_root.rstrip("/")
        images_root = os.path.join(images_top_root, distro, release,
                                   "rdo_trunk")
        promotion_link = os.path.join(images_root, target_label)
        candidate_dict = stage_info['dlrn']['promotions'][
            'promotion_candidate']
        candidate_hash = DlrnHash(source=candidate_dict)
        # NOTE(review): basename() of the joined path reduces to just
        # candidate_hash.full_hash — presumably check_links resolves the
        # directory relative to the link location; confirm intent.
        promotion_dir = os.path.basename(
            os.path.join(images_root, candidate_hash.full_hash))
        current_dict = stage_info['dlrn']['promotions']['currently_promoted']
        current_hash = DlrnHash(source=current_dict)
        previous_dict = stage_info['dlrn']['promotions']['previously_promoted']
        previous_label = previous_dict['name']
        previous_link = os.path.join(images_root, previous_label)
        # The previous link is expected to point at the hash that was
        # current before this promotion (i.e. currently_promoted's hash)
        previous_dir = os.path.join(images_root, current_hash.full_hash)
        # In staging, link checks run locally through the os module
        rl_module = os
    else:
        # We are checking production
        # FIXME(gerami) this branch needs revisiting
        # NOTE(review): promotion_link, target_label, promotion_dir,
        # previous_link and previous_dir are never assigned on this branch,
        # so the check_links call below raises NameError — covered by the
        # FIXME above.
        images_base_dir = kwargs['image_base']
        user = kwargs['user']
        key_path = kwargs['key_path']
        # promotion_target = args[3]
        # full_hash = args[4]
        # release = kwargs['release']
        log.debug("Install required for nonstaging env")
        # Local import: pysftp is only needed for the production path
        import pysftp
        sftp = pysftp.Connection(host=images_base_dir, username=user,
                                 private_key=key_path)
        # images_dir = os.path.join(
        #     '/var/www/html/images',
        #     release, 'rdo_trunk')
        # In production, link checks run remotely through the sftp client
        rl_module = sftp
    check_links(rl_module, promotion_link, target_label, promotion_dir,
                previous_link=previous_link, previous_dir=previous_dir)
def test_no_old_hashes_returns_candidates(self, fetch_hashes_mock):
    """When the target label holds no hashes yet, every candidate hash is
    selected for promotion."""
    first_candidate = DlrnHash(source={
        'timestamp': '1528085424',
        'commit_hash': 'd1c5379369b24effdccfe5dde3e93bd21884eda5',
        'distro_hash': 'cd4fb616ac3065794b8a9156bbe70ede3d77eff5'
    })
    second_candidate = DlrnHash(source={
        'timestamp': '1528085434',
        'commit_hash': 'd1c5379369b24effdccfe5dde3e93bd21884eda6',
        'distro_hash': 'cd4fb616ac3065794b8a9156bbe70ede3d77eff6'
    })
    candidate_hashes = [first_candidate, second_candidate]
    # First fetch returns the candidates, second fetch (target) is empty
    fetch_hashes_mock.side_effect = [candidate_hashes, []]
    obtained_hashes = self.promoter.select_candidates(
        'candidate_label', 'target_label')
    fetch_hashes_mock.assert_has_calls([
        mock.call('candidate_label', count=10),
        mock.call('target_label')
    ])
    assert obtained_hashes == candidate_hashes
def test_dlrn_server(staged_env):
    """ General server testing, with a single promotion
    :param staged_env: The staged env fixture
    :return: None
    """
    stage_info, promoter = staged_env
    commit = stage_info['dlrn']['promotions']['promotion_candidate']
    promote_name = stage_info['dlrn']['promotion_target']
    repo_url = stage_info['dlrn']['server']['repo_url']
    client = promoter.dlrn_client
    candidate_label = commit['name']
    dlrn_hash = DlrnHash(source=commit)
    dlrn_hash.label = candidate_label
    # TODO: Check db injection (needs sqlite3 import)
    # Verify the dlrnapi endpoint accepts the promotion call
    try:
        client.promote(dlrn_hash, promote_name,
                       candidate_label=candidate_label,
                       create_previous=False)
        assert True, "Dlrn api responding"
    except ApiException as e:
        msg = "Exception when calling DefaultApi->api_promote_post: %s\n" % e
        assert False, msg
    # Verify the repo_url serves the generated versions file
    versions_url = os.path.join(repo_url, promote_name, 'versions.csv')
    try:
        url.urlopen(versions_url)
        assert True, "Versions file found"
    except IOError:
        assert False, "No versions file generated"
def test_commit_dir(self):
    """Check the commit_dir layout for each hash type: commitdistro hashes
    use the ab/cd prefix dirs, aggregates prepend their label."""
    for hash_type, source_types in hashes_test_cases.items():
        dlrn_hash = DlrnHash(source=source_types['object']['valid'])
        if hash_type == "aggregate":
            dlrn_hash.label = "label"
            self.assertEqual(dlrn_hash.commit_dir, "label/ab/cd/abcd")
        elif hash_type == "commitdistro":
            self.assertEqual(dlrn_hash.commit_dir, "ab/cd/abcd_defg")
def test_create_from_object(self):
    """Valid source objects build a hash; invalid ones raise DlrnHashError."""
    # Prevent Mock class to identify as dict
    for hash_type, source_types in hashes_test_cases.items():
        object_sources = source_types['object']
        DlrnHash(source=object_sources['valid'])
        with self.assertRaises(DlrnHashError):
            DlrnHash(source=object_sources['invalid'])
def test_create_from_dict(self):
    """The DlrnHash factory picks the concrete subclass from the dict shape,
    and rejects invalid dicts with DlrnHashError."""
    expected_types = {
        "commitdistro": DlrnCommitDistroHash,
        "aggregate": DlrnAggregateHash,
    }
    for hash_type, source_types in hashes_test_cases.items():
        dlrn_hash = DlrnHash(source=source_types['dict']['valid'])
        if hash_type in expected_types:
            self.assertEqual(type(dlrn_hash), expected_types[hash_type])
        with self.assertRaises(DlrnHashError):
            DlrnHash(source=source_types['dict']['invalid'])
def test_dump_to_params(self):
    """dump_to_params copies the hash fields onto a params object:
    commit/distro hashes for commitdistro, aggregate_hash for aggregates,
    plus the timestamp in every case."""
    for hash_type, source_types in hashes_test_cases.items():
        params = Mock()
        dlrn_hash = DlrnHash(source=source_types['object']['valid'])
        dlrn_hash.dump_to_params(params)
        if hash_type == "aggregate":
            self.assertEqual(params.aggregate_hash, dlrn_hash.aggregate_hash)
        elif hash_type == "commitdistro":
            self.assertEqual(params.commit_hash, dlrn_hash.commit_hash)
            self.assertEqual(params.distro_hash, dlrn_hash.distro_hash)
        self.assertEqual(params.timestamp, dlrn_hash.timestamp)
def test_fetch_promotions_from_hash(self, fetch_hashes_mock, mock_log_debug):
    """fetch_promotions_from_hash dumps the hash into query params,
    delegates to the hash fetcher, and logs what it is doing."""
    expected_params = copy.deepcopy(self.client.hashes_params)
    param_dlrn_hash = \
        DlrnHash(source=hashes_test_cases['commitdistro']['dict']['valid'])
    self.client.fetch_promotions_from_hash(param_dlrn_hash, count=1)
    param_dlrn_hash.dump_to_params(expected_params)
    fetch_hashes_mock.assert_has_calls([
        mock.call(expected_params, count=1)
    ])
    mock_log_debug.assert_has_calls([
        mock.call("Fetching promotion hashes from hash %s", param_dlrn_hash)
    ])
def setUp(self):
    """Build the start/changed hash pair used by the named-hash tests."""
    super(TestNamedHashes, self).setUp()
    self.dlrn_start_hash = DlrnHash(source={
        'timestamp': '1528085427',
        'commit_hash': 'd221f4b33cf2763875fc6394902f7923108a34da',
        'distro_hash': '70bdcd40eb5cc62e4762a7db0086e09f6edf2e5c'
    })
    self.dlrn_changed_hash = DlrnHash(source={
        'timestamp': '1528085529',
        'commit_hash': 'e3d9fffbf82ec71deff60ba914f1db0e1625466a',
        'distro_hash': 'Iba78e857267ac771d23919fbd1e3c9fcc5813c9'
    })
def test_overcloud_images(staged_env):
    """
    Test that the staged hierarchy of overcloud images was created correctly
    :param staged_env: The staged_env fixture
    :return: None
    """
    config, stage_info = staged_env
    # Check images subtree: every promotion's full hash should have a dir
    overcloud_images_path = config.qcow_server['root']
    base_path = os.path.join(
        overcloud_images_path,
        config['distro'],
        config['release'],
        'rdo_trunk',
    )
    # Check stage_info has the required attributes
    overcloud_images = stage_info['overcloud_images']
    attributes = ['user', 'key_path', 'root']
    for attribute in attributes:
        assert attribute in overcloud_images
    check_paths = []
    existing_paths = []
    for commit in stage_info['dlrn']['promotions'].values():
        dlrn_hash = DlrnHash(source=commit)
        # check commit attributes are there
        hash_path = os.path.join(base_path, dlrn_hash.full_hash)
        check_paths.append(hash_path)
        # We don't block at the first missing path: collect everything that
        # exists so the assert below reports all missing paths at once.
        # (The original re-raised the OSError here, which defeated that.)
        try:
            os.stat(hash_path)
            existing_paths.append(hash_path)
        except OSError:
            pass
    assert check_paths == existing_paths
    # check if we have a leaf with the symbolic link
    # and the dir linked exists
    promotion_commit = \
        stage_info['dlrn']['promotions']['currently_promoted']
    promotion_name = promotion_commit['name']
    promotion_link = os.path.join(base_path, promotion_name)
    promotion_target = os.readlink(promotion_link)
    # The first commit is "the current promotion link"
    dlrn_hash = DlrnHash(source=promotion_commit)
    sample_path = \
        os.path.join(base_path, dlrn_hash.full_hash)
    assert promotion_target == sample_path
def test_comparisons(self):
    """Hashes built from the same source compare equal, different sources
    compare unequal, and comparing against a non-hash raises TypeError.
    Equality also holds for sources without a timestamp."""
    non_dh = {}
    for hash_type, source_types in hashes_test_cases.items():
        object_sources = source_types['object']
        first = DlrnHash(source=object_sources['valid'])
        second = DlrnHash(source=object_sources['valid'])
        self.assertEqual(first, second)
        second = DlrnHash(source=object_sources['different'])
        self.assertNotEqual(first, second)
        with self.assertRaises(TypeError):
            (first == non_dh)
        with self.assertRaises(TypeError):
            (first != non_dh)
        first = DlrnHash(source=object_sources['valid_notimestamp'])
        second = DlrnHash(source=object_sources['valid_notimestamp'])
        self.assertEqual(first, second)
def test_create_from_values(self):
    """Building a hash from keyword values yields the subclass matching
    the hash type."""
    subclass_by_type = {
        "commitdistro": DlrnCommitDistroHash,
        "aggregate": DlrnAggregateHash,
    }
    for hash_type, source_types in hashes_test_cases.items():
        dlrn_hash = DlrnHash(**source_types['dict']['valid'])
        if hash_type in subclass_by_type:
            self.assertEqual(type(dlrn_hash), subclass_by_type[hash_type])
def test_get_hash_from_component_success(self, mock_log_debug,
                                         mock_log_error):
    """get_hash_from_component reads commit.yaml from the component's base
    url, builds the matching hash, and logs each step without errors."""
    hash_info = {
        'dt_commit': 1,
        'timestamp': 1,
        'commit_hash': "a",
        'distro_hash': "b"
    }
    expected_hash = DlrnHash(source=hash_info)
    # Serve a commit.yaml through a file:// url from a temp dir
    tmp_dir = tempfile.mkdtemp()
    try:
        with open(os.path.join(tmp_dir, "commit.yaml"), "w") as commit_yaml:
            commit_yaml.write(yaml.dump({'commits': [hash_info]}))
        base_url = "file://{}".format(tmp_dir)
        commit_url = "{}/{}".format(base_url, "commit.yaml")
        promotion_hash = self.client.get_hash_from_component("", "component1",
                                                             base_url)
        self.assertFalse(mock_log_error.called)
        self.assertEqual(promotion_hash, expected_hash)
        mock_log_debug.assert_has_calls([
            mock.call("%s base url url for component %s at %s", '',
                      "component1", base_url),
            mock.call("%s commit info url for component %s at %s", '',
                      "component1", commit_url),
            mock.call("%s component '%s' commit info: %s", '', "component1",
                      hash_info),
            mock.call("%s adding '%s' to promotion list", '', promotion_hash)
        ])
    finally:
        shutil.rmtree(tmp_dir)
def test_single_promote(staged_env):
    """Promote the staged candidate hash from the staging label to the
    staging-promoted label."""
    stage_info, promoter = staged_env
    promotions = stage_info['dlrn']['promotions']
    promotion_candidate = DlrnHash(source=promotions['promotion_candidate'])
    promoter.promote(promotion_candidate, "tripleo-ci-staging",
                     "tripleo-ci-staging-promoted")
def query_container_registry_promotion(stage_info=None, **kwargs):
    """
    Check that the hash containers have been pushed to the promotion
    registry with the promotion_target tag
    :param stage_info: a dictionary containing parameter of the staging env
    :param kwargs: additional parameter for non-staged executions
    :return: None
    """
    # Initialized here so the final assert is valid on both branches
    # (previously it was only assigned in the staging branch, raising
    # NameError on the production path)
    missing_images = []
    if stage_info is not None:
        registry_target = stage_info['registries']['targets'][0]['host']
        promotion_target = stage_info['dlrn']['promotion_target']
        candidate_dict = stage_info['dlrn']['promotions'][
            'promotion_candidate']
        candidate_hash = DlrnHash(source=candidate_dict)
        no_ppc = stage_info.get('ppc_manifests', True)
        for line in stage_info['containers']['images']:
            name, tag = line.split(":")
            # Check the container manifest exists for the candidate tag
            reg_url = "http://{}/v2/{}/manifests/{}".format(
                registry_target, name, tag)
            log.info("Checking for promoted container hash: %s", reg_url)
            try:
                url_lib.urlopen(reg_url)
                log.debug("%s:%s found", name, tag)
            except url_lib.HTTPError as ex:
                log.exception(ex)
                if no_ppc and '_ppc64le' in tag:
                    # ppc manifests are optional when disabled in the stage
                    log.info(
                        "(expected - ppc manifests disabled)"
                        "Image not found - %s", line)
                else:
                    log.error("Image not found - %s", line)
                    missing_images.append(line)
            # For the full_hash lines only, check that there is
            # an equivalent promotion_target entry
            if tag == candidate_hash.full_hash:
                reg_url = "http://{}/v2/{}/manifests/{}".format(
                    registry_target, name, promotion_target)
                log.info("Checking for promoted container tag: %s", reg_url)
                try:
                    url_lib.urlopen(reg_url)
                    log.debug("%s:%s found", name, promotion_target)
                except url_lib.HTTPError as ex:
                    log.exception(ex)
                    log.error("Image with named tag not found - %s", line)
                    promo_tgt_line = line.replace(candidate_hash.full_hash,
                                                  promotion_target)
                    missing_images.append(promo_tgt_line)
    else:
        # We are checking production
        # TODO: how to verify promoter containers
        log.info("Compare images tagged with hash and promotion target:")
        log.error("Not implemented")
    assert missing_images == [], \
        "Images are missing\n{}".format(missing_images)
def test_properties(self):
    """full_hash is commit_hash joined with a distro_hash prefix for
    commitdistro hashes, and the aggregate_hash itself for aggregates."""
    for hash_type, source_types in hashes_test_cases.items():
        source = source_types['object']['valid']
        dlrn_hash = DlrnHash(source=source)
        if hash_type == "aggregate":
            self.assertEqual(dlrn_hash.full_hash, source.aggregate_hash)
        elif hash_type == "commitdistro":
            expected = "{}_{}".format(source.commit_hash,
                                      source.distro_hash[:8])
            self.assertEqual(dlrn_hash.full_hash, expected)
def force_promote(promoter, args):
    """Build a candidate hash from the CLI arguments and promote it from
    the candidate label to the target label, re-raising on bad input."""
    try:
        forced_hash = DlrnHash(source=args)
    except DlrnHashError:
        print("Unable to generate a valid candidate hash from the"
              " information provided")
        raise
    promoter.promote(forced_hash, args.candidate_label, args.target_label)
def check_dlrn_promoted_hash(stage_info=None, **kwargs):
    """ Check that the supposed hash has been promoted to
    promotion_target as recorded in DLRN.
    :param stage_info: a dictionary containing parameter of the staging env
    :param kwargs: additional parameter for non-staged executions
    :return: None
    """
    if stage_info is not None:
        # We are checking a stage
        api_url = stage_info['dlrn']['server']['api_url']
        promotion_target = stage_info['dlrn']['promotion_target']
        candidate_commit = \
            stage_info['dlrn']['promotions']['promotion_candidate']
        candidate_hash = DlrnHash(source=candidate_commit)
        api_client = dlrnapi_client.ApiClient(host=api_url)
        dlrn_client = dlrnapi_client.DefaultApi(api_client=api_client)
        # Query only the latest promotion for the target name
        params = dlrnapi_client.PromotionQuery()
        params.limit = 1
        params.promote_name = promotion_target
    else:
        # We are checking production server
        # TODO(gcerami) implement this branch ?
        # NOTE(review): dlrn_client, params and candidate_hash are never
        # assigned on this branch, so the code below raises NameError when
        # stage_info is None — blocked on the TODO above.
        pass
    try:
        api_response = dlrn_client.api_promotions_get(params)
        log.debug(api_response)
    except dlrnapi_client.rest.ApiException:
        # NOTE(review): this logs the exception *class*, not the caught
        # instance, before re-raising
        log.error('Exception when calling api_promotions_get: %s',
                  dlrnapi_client.rest.ApiException)
        raise
    error_msg = "No promotions for hash {}".format(candidate_hash)
    assert api_response != [], error_msg
    promotion_hash = DlrnHash(source=api_response[0])
    error_message = ("Expected full hash: {}"
                     " has not been promoted to {}."
                     "".format(promotion_hash.full_hash, promotion_target))
    # Pass if any returned promotion record carries the target name
    conditions = [(promotion.promote_name == promotion_target)
                  for promotion in api_response]
    assert any(conditions), error_message
def test_promote_success(self, check_output_mock, extra_vars_mock,
                         unlink_mock, mock_log_info, mock_log_error):
    """A successful promote runs the external command and logs no errors."""
    candidate_hash = \
        DlrnHash(source=hashes_test_cases['aggregate']['dict']['valid'])
    check_output_mock.return_value = "test log"
    self.client.promote(candidate_hash, "test")
    self.assertTrue(check_output_mock.called)
    self.assertFalse(mock_log_error.called)
def test_promote_failure(self, check_output_mock, extra_vars_mock,
                         unlink_mock, mock_log_info, mock_log_error):
    """A failing external command turns into a PromotionError and an error
    log entry."""
    candidate_hash = \
        DlrnHash(source=hashes_test_cases['aggregate']['dict']['valid'])
    failure = subprocess.CalledProcessError(1, 2)
    failure.output = b"test"
    check_output_mock.side_effect = failure
    with self.assertRaises(PromotionError):
        self.client.promote(candidate_hash, "test")
    self.assertTrue(mock_log_error.called)
def test_dlrn_promoted(staged_env):
    """ Checks that candidate hashes in dlrn have been promoted
    And others did not promote
    :param staged_env: The staged_env fixture
    :return: None
    """
    stage_info = staged_env
    promotion_hash = DlrnHash(
        source=stage_info['dlrn']['promotions']['promotion_candidate'])
    with patch.object(dlrnapi_client.DefaultApi, 'api_promotions_get') as \
            mock_api:
        # positive test: api reports the hash under the promoted name
        promotion_hash.promote_name = "tripleo-ci-staging-promoted"
        mock_api.return_value = [promotion_hash]
        check_dlrn_promoted_hash(stage_info=stage_info)
        # negative test: api reports it only under the candidate name
        promotion_hash.promote_name = "tripleo-ci-staging"
        mock_api.return_value = [promotion_hash]
        with pytest.raises(AssertionError):
            check_dlrn_promoted_hash(stage_info=stage_info)
def setUp(self):
    """Prepare qcow client fixtures: image directories on disk plus a valid
    and a missing candidate hash."""
    super(TestQcowClient, self).setUp()
    self.client = self.promoter.qcow_client
    self.images_root = self.client.root
    self.images_dir = self.client.images_dir
    self.previous_hash_dir = os.path.join(self.images_dir, "efgh")
    self.current_hash_dir = os.path.join(self.images_dir, "dunno")
    self.candidate_hash_dir = os.path.join(self.images_dir, "abcd")
    self.target_label = "test-label"
    self.previous_target_label = "previous-{}".format(self.target_label)
    # exist_ok replaces the previous try/except FileExistsError pairs
    os.makedirs(self.candidate_hash_dir, exist_ok=True)
    os.makedirs(self.previous_hash_dir, exist_ok=True)
    self.valid_candidate_hash = \
        DlrnHash(source=hashes_test_cases['aggregate']['dict']['valid'])
    self.missing_candidate_hash = \
        DlrnHash(source=hashes_test_cases['aggregate']['dict']['different'])
def test_promote_containers(staged_env):
    """ Tests promotion of containers
    :param staged_env: The stage env fixture
    :return: None
    """
    stage_info, promoter = staged_env
    candidate_dict = stage_info['dlrn']['promotions']['promotion_candidate']
    target_label = stage_info['dlrn']['promotion_target']
    candidate_label = candidate_dict['name']
    candidate_hash = DlrnHash(source=candidate_dict)
    # Refresh the named hashes cache, then promote only via the registries
    promoter.dlrn_client.fetch_current_named_hashes(store=True)
    promoter.promote(candidate_hash, candidate_label, target_label,
                     allowed_clients=["registries_client"])
    promoter_integration_checks.query_container_registry_promotion(
        stage_info=stage_info)
def test_containers(staged_env):
    """
    Test that the containers are created and pushed correctly to local
    source registry
    :param staged_env: The staged_env fixture
    :return: None
    """
    __, stage_info = staged_env
    # The candidate hash is the same for every image: compute and type-check
    # it once instead of once per container (it was loop-invariant)
    candidate_hash_dict = \
        stage_info['dlrn']['promotions']['promotion_candidate']
    candidate_hash = DlrnHash(source=candidate_hash_dict)
    if stage_info['main']['pipeline_type'] == "integration":
        assert type(candidate_hash) == DlrnAggregateHash
    elif stage_info['main']['pipeline_type'] == "single":
        assert type(candidate_hash) == DlrnCommitDistroHash
    # Check that all declared containers are really pushed
    ppc64le_count = 0
    found = []
    source_registry = stage_info['registries']['source']['url']
    for full_name in stage_info['containers']['images']:
        # We only upload the containers for the promotion candidate hash
        assert candidate_hash.full_hash in full_name
        container, tag = full_name.split(':')
        reg_url = "{}/v2/{}/manifests/{}".format(source_registry, container,
                                                 tag)
        if "_ppc64le" in tag:
            ppc64le_count += 1
        try:
            url.urlopen(reg_url)
            found.append(full_name)
        except url.HTTPError:
            print("Missing container: {}".format(reg_url))
    assert sorted(stage_info['containers']['images']) == sorted(found)
    # check that at least one image doesn't have ppc tagging
    # If all images had ppc64le tagging, they would be at least one third
    # of the total; check that they are way less
    images_count = len(stage_info['containers']['images'])
    ppc64le_ratio = float(ppc64le_count) / images_count
    assert ppc64le_ratio <= 1.0 / 3.0
def __init__(self, config):
    """
    like many inits around the code, this loads the config and create
    shortcuts for the used configuration parameters
    :param config: The global stage config
    """
    self.config = config
    self.dry_run = self.config['dry_run']
    self.docker_client = docker.from_env()
    # Select only the stagedhash with the promotion candidate
    candidate_hash_dict = \
        self.config.dlrn['promotions']['promotion_candidate']
    self.candidate_hash = DlrnHash(source=candidate_hash_dict)
    self.containers_list_base = \
        self.config.containers['containers_list_base']
    self.containers_list_exclude_config = \
        self.config.containers['containers_list_exclude_config']
    self.containers_list_path = self.config.containers[
        'containers_list_path']
    self.tripleo_commit_sha = self.config.containers['tripleo_commit_sha']
    # Pick the first registry declared as a source, if any
    self.source_registry = next(
        (registry for registry in self.config.registries
         if registry['type'] == "source"),
        None)
    if self.source_registry is None:
        raise Exception("No source registry specified in configuration")
    self.base_image = BaseImage("promotion-stage-base:v1")
    if not self.dry_run:
        self.source_image = self.base_image.build()
    self.suffixes = self.config.containers['images-suffix']
    self.distro = self.config['distro']
    self.namespace = self.config.containers['namespace']
    self.distro_name = self.config['distro_name']
    self.pushed_images = []
    self.containers_root = self.config.containers['root']
    self.excluded_containers = ['nonexisting', 'excluded']
def test_no_candidates_returns_empty_list(self, fetch_hashes_mock):
    """With no candidate hashes at all, nothing is selected for promotion."""
    # Renamed from `hash`, which shadowed the builtin
    old_hash = DlrnHash(source={
        'timestamp': '1528085424',
        'commit_hash': 'd1c5379369b24effdccfe5dde3e93bd21884eda5',
        'distro_hash': 'cd4fb616ac3065794b8a9156bbe70ede3d77eff5'
    })
    # First fetch (candidates) is empty, second fetch (target) has one hash
    fetch_hashes_mock.side_effect = [[], [old_hash]]
    obtained_hashes = self.promoter.select_candidates(
        'candidate_label', 'target_label')
    assert len(obtained_hashes) == 0
    fetch_hashes_mock.assert_has_calls([
        mock.call('candidate_label', count=10),
    ])
def test_parse(staged_env):
    """ Checks if success and failure patterns are present in the logs.
    :param staged_env: The staged_env fixture
    :return: None
    """
    stage_info = staged_env
    candidate_dict = stage_info['dlrn']['promotions']['promotion_candidate']
    candidate_hash = DlrnHash(source=candidate_dict)
    # Pick the success pattern matching the pipeline the hash belongs to
    if type(candidate_hash) is DlrnCommitDistroHash:
        pattern = success_pattern_container_positive_single_pipeline
    elif type(candidate_hash) is DlrnAggregateHash:
        pattern = success_pattern_container_positive_integration_pipeline
    else:
        pattern = None
    if pattern is not None:
        log_path = os.path.expanduser(stage_info['main']['log_file'])
        with open(log_path, "w") as log_file:
            log_file.write(pattern)
    parse_promotion_logs(stage_info=stage_info)
def create_hierarchy(self):
    """ Creates the basic hierarchy for all the hashes, and injects an
    empty image in the directory
    :return: None
    """
    for commit in self.promotions.values():
        promoted_hash = DlrnHash(source=commit)
        image_name = "{}-image.tar.gz".format(promoted_hash.full_hash)
        image_dir = os.path.join(self.images_root, promoted_hash.full_hash)
        image_path = os.path.join(image_dir, image_name)
        try:
            os.mkdir(image_dir)
            self.log.info("Created image dir in %s", image_dir)
        except OSError:
            # Directory already there from a previous run
            self.log.info("Reusing image in %s", image_path)
        self.log.info("Creating empty image in %s", image_dir)
        # This emulates a "touch" command
        with open(image_path, 'w'):
            pass
def test_promote_qcows(staged_env):
    """ Tests promotion of overcloud images
    :param staged_env: The stage env fixture
    :return: None
    """
    stage_info, promoter = staged_env
    candidate_dict = stage_info['dlrn']['promotions']['promotion_candidate']
    candidate_hash = DlrnHash(source=candidate_dict)
    # Each pipeline type must promote its matching hash kind
    if stage_info['main']['pipeline_type'] == "single":
        assert type(candidate_hash) == DlrnCommitDistroHash, \
            "Single pipeline should promote a commit/distro hash"
    else:
        assert type(candidate_hash) == DlrnAggregateHash, \
            "Integration pipeline should promote an aggregate hash"
    target_label = stage_info['dlrn']['promotion_target']
    promoter.dlrn_client.fetch_current_named_hashes(store=True)
    promoter.qcow_client.promote(candidate_hash, target_label)
    promoter_integration_checks.compare_tagged_image_hash(
        stage_info=stage_info)