def test_build_invalid_from_source(self):
     """Building a hash from an invalid dict source must raise DlrnHashError
     for both the commitdistro and the aggregate hash types.
     """
     invalid_commitdistro = hashes_test_cases['commitdistro']['dict']['invalid']
     invalid_aggregate = hashes_test_cases['aggregate']['dict']['invalid']
     with self.assertRaises(DlrnHashError):
         DlrnCommitDistroHash(source=invalid_commitdistro)
     with self.assertRaises(DlrnHashError):
         DlrnAggregateHash(source=invalid_aggregate)
    def test_get_civotes_info_aggregate(self):
        """For an aggregate hash, get_civotes_info must return a message
        pointing at the civotes_agg_detail page keyed by the aggregate hash.
        """
        agg_hash = DlrnAggregateHash(commit_hash='a', distro_hash='b',
                                     aggregate_hash='c', timestamp=1)
        civotes_info = self.client.get_civotes_info(agg_hash)
        # Smoke-check that the returned message parses as a URL-bearing string
        urlparse(civotes_info)
        expected = ("Check results at: http://api.url/api/civotes_agg_detail.html"
                    "?ref_hash=c")
        self.assertEqual(civotes_info, expected)
 def test_fetch_jobs_success_aggregate(self, api_agg_status_get_mock,
                                       api_repo_status_get_mock,
                                       mock_log_error):
     """fetch_jobs on an aggregate hash must query only the aggregate
     status endpoint and return the job names without logging errors.
     """
     api_agg_status_get_mock.return_value = self.api_jobs
     agg_hash = DlrnAggregateHash(commit_hash='a', distro_hash='b',
                                  aggregate_hash='c', timestamp=1)
     jobs = self.client.fetch_jobs(agg_hash)
     self.assertEqual(len(jobs), 2)
     self.assertEqual(jobs, ["job0", "job1"])
     # No error logged, aggregate endpoint hit, per-repo endpoint untouched
     self.assertFalse(mock_log_error.called)
     self.assertTrue(api_agg_status_get_mock.called)
     self.assertFalse(api_repo_status_get_mock.called)
 def test_build_valid_from_source(self):
     """Hashes built from a valid dict source must expose every field of
     that source dict.

     Bug fix: the timestamp assertion previously sat after the for loop,
     so it was only evaluated once, against whichever hash type the test
     case dict happened to yield last. It now runs for every hash type.
     """
     for hash_type, source_types in hashes_test_cases.items():
         values = source_types['dict']['valid']
         if hash_type == "commitdistro":
             dh = DlrnCommitDistroHash(source=values)
             self.assertEqual(dh.commit_hash, values['commit_hash'])
             self.assertEqual(dh.distro_hash, values['distro_hash'])
         elif hash_type == "aggregate":
             dh = DlrnAggregateHash(source=values)
             self.assertEqual(dh.aggregate_hash, values['aggregate_hash'])
         else:
             # Unknown hash type: nothing was built, skip the shared check
             continue
         self.assertEqual(dh.timestamp, values['timestamp'])
 def test_vote_success_aggregate(self, mock_api_report, mock_log_info):
     """A successful vote on an aggregate hash must call the report API
     and log both the full parameter set and the voted hash.
     """
     agg_hash = DlrnAggregateHash(commit_hash='a', distro_hash='b',
                                  aggregate_hash='c', timestamp=1)
     # Build the parameter set we expect vote() to assemble internally
     expected_params = copy.deepcopy(self.client.report_params)
     expected_params.aggregate_hash = agg_hash.aggregate_hash
     expected_params.job_id = 'job1'
     expected_params.notes = None
     expected_params.success = str(True)
     expected_params.timestamp = agg_hash.timestamp
     expected_params.url = "https://job.url"
     mock_api_report.return_value = True
     # The client logs the params flattened onto a single line
     flat_params = str(expected_params).replace('\n', ' ').replace('\r', ' ')
     self.client.vote(agg_hash, expected_params.job_id, expected_params.url,
                      expected_params.success)
     mock_log_info.assert_has_calls([
         mock.call('Dlrn voting success: %s for job %s with parameters %s',
                   'True', 'job1', flat_params),
         mock.call('Dlrn voted success: %s for job %s on hash %s', 'True',
                   'job1', agg_hash),
     ])
    def setUp(self):
        """Build a DlrnClient against a fake config, plus the canned API
        mocks and ready-made hash fixtures shared by the DlrnClient tests:
        fake job lists, a prebuilt ApiException, and ordered/unordered/
        duplicated lists of commitdistro and aggregate api hashes.
        """
        setup_logging("promoter", logging.DEBUG)
        self.config = DlrnClientConfig(dlrnauth_username='******',
                                       dlrnauth_password='******',
                                       api_url="http://api.url",
                                       repo_url="file:///tmp")
        # Promotion target label -> candidate label (used by promote calls)
        self.config.promotion_steps_map = {
            'current-tripleo': 'tripleo-ci-testing'
        }
        self.client = DlrnClient(self.config)

        # set up fake job list with two different jobs
        self.api_jobs = []
        for idx in range(2):
            api_job = Mock()
            api_job.job_id = "job{}".format(idx)
            api_job.timestamp = idx
            api_job.url = "https://dev/null"
            self.api_jobs.append(api_job)

        # Set up ApiException for api calls
        self.api_exception = ApiException()
        self.api_exception.body = '{"message": "message"}'
        self.api_exception.message = "message"
        self.api_exception.status = 404
        self.api_exception.reason = "Not found"

        # Set up some ready to use hashes
        self.dlrn_hash_commitdistro1 = DlrnCommitDistroHash(commit_hash='a',
                                                            distro_hash='b',
                                                            component="comp1",
                                                            timestamp=1)
        self.dlrn_hash_commitdistro2 = DlrnCommitDistroHash(commit_hash='c',
                                                            distro_hash='d',
                                                            component="comp2",
                                                            timestamp=2)
        self.dlrn_hash_aggregate = DlrnAggregateHash(commit_hash='abc',
                                                     distro_hash='def',
                                                     aggregate_hash='ghjk',
                                                     timestamp=1)
        # Expected log header emitted when promoting commitdistro1
        self.promote_log_header = ("Dlrn promote '{}' from {} to {}:"
                                   "".format(self.dlrn_hash_commitdistro1,
                                             'tripleo-ci-testing',
                                             'current-tripleo'))
        # Set up the matrix of api_hashes to test
        # (Mock specs restrict each fake hash to the attrs a real one has)
        commitdistrohash_valid_attrs = ['commit_hash', 'distro_hash',
                                        'timestamp']
        aggregatehash_valid_attrs = ['aggregate_hash', 'timestamp']

        # Create commitdistro hash list
        self.api_hashes_commitdistro_ordered = []
        for idx in range(3):
            api_hash = Mock(spec=commitdistrohash_valid_attrs)
            api_hash.commit_hash = "a{}".format(idx)
            api_hash.distro_hash = "b{}".format(idx)
            api_hash.timestamp = idx
            self.api_hashes_commitdistro_ordered.append(api_hash)

        # Create list with a duplicate by appending the last element in the
        # for again
        # NOTE: relies on api_hash still holding the last loop element
        api_hashes_commitdistro_ordered_with_duplicates = \
            copy.deepcopy(self.api_hashes_commitdistro_ordered)
        api_hashes_commitdistro_ordered_with_duplicates.append(api_hash)

        # Create an aggregate hash list
        self.api_hashes_aggregate_ordered = []
        for idx in range(3):
            api_hash = Mock(spec=aggregatehash_valid_attrs)
            api_hash.aggregate_hash = "a{}".format(idx)
            api_hash.commit_hash = "b{}".format(idx)
            api_hash.distro_hash = "c{}".format(idx)
            api_hash.timestamp = idx
            self.api_hashes_aggregate_ordered.append(api_hash)

        # Create list with a duplicate by appending the last element in the
        # for again
        api_hashes_aggregate_ordered_with_duplicates = \
            copy.deepcopy(self.api_hashes_aggregate_ordered)
        api_hashes_aggregate_ordered_with_duplicates.append(api_hash)

        # Create an unordered list by putting the last element in front
        #
        # CommitDistro
        api_hashes_commitdistro_unordered = \
            copy.deepcopy(self.api_hashes_commitdistro_ordered)
        api_hash = api_hashes_commitdistro_unordered.pop(0)
        api_hashes_commitdistro_unordered.append(api_hash)
        #
        # Aggregate
        api_hashes_aggregate_unordered = \
            copy.deepcopy(self.api_hashes_aggregate_ordered)
        api_hash = api_hashes_aggregate_unordered.pop(0)
        api_hashes_aggregate_unordered.append(api_hash)

        # Expose the matrix pairs (commitdistro, aggregate) for the tests
        self.api_hashes_all_types_ordered = [
            self.api_hashes_commitdistro_ordered,
            self.api_hashes_aggregate_ordered,
        ]
        self.api_hashes_all_types_unordered = [
            api_hashes_commitdistro_unordered,
            api_hashes_aggregate_unordered,
        ]
        self.api_hashes_all_types_with_duplicates = [
            api_hashes_commitdistro_ordered_with_duplicates,
            api_hashes_aggregate_ordered_with_duplicates,
        ]
    def setUp(self):
        """Build a RepoClient over a temporary directory populated with the
        on-disk fixtures the repo tests need: per-release container list
        yaml files, an exclude-config yaml, an empty containers file, and a
        versions.csv per hash commit dir.
        """
        self.dlrn_hash_commitdistro = DlrnCommitDistroHash(commit_hash='abc',
                                                           distro_hash='def',
                                                           component="comp1",
                                                           timestamp=1)
        self.dlrn_hash_commitdistro2 = DlrnCommitDistroHash(commit_hash='ghj',
                                                            distro_hash='klm',
                                                            component="comp2",
                                                            timestamp=2)
        self.dlrn_hash_aggregate = DlrnAggregateHash(commit_hash='abc',
                                                     distro_hash='def',
                                                     aggregate_hash='ghjk',
                                                     timestamp=1)
        # Hashes that get a versions.csv written for them below
        self.hashes = [self.dlrn_hash_commitdistro, self.dlrn_hash_aggregate]
        self.temp_dir = tempfile.mkdtemp()
        self.versions_csv_dir = self.temp_dir
        config_defaults = PromoterLegacyConfigBase.defaults

        # file:// URLs so the client reads straight from the temp dir
        repo_url = "file://{}/".format(self.temp_dir)
        containers_list_base_url = "file://{}".format(self.temp_dir)
        containers_list_exclude_config_path = os.path.join(
            self.temp_dir, "exclude_file.yaml")
        # Throwaway config object carrying only the attrs RepoClient reads
        config = type(
            "Config", (), {
                'repo_url': repo_url,
                'release': 'master',
                'containers_list_base_url': containers_list_base_url,
                'containers_list_path':
                config_defaults['containers_list_path'],
                'containers': {
                    'build_method':
                    'tripleo',
                    'container_preffix':
                    config_defaults['container_preffix'],
                    'containers_list_exclude_config':
                    "file://{}".format(containers_list_exclude_config_path),
                }
            })
        self.client = RepoClient(config)
        # Column names for the versions.csv files written below
        fieldnames = ("Project,Source Repo,Source Sha,Dist Repo,Dist Sha,"
                      "Status,Last Success Timestamp,Component,Pkg NVR"
                      "").split(',')

        self.versions_csv_rows = [{
            'Project':
            "python-tripleo-common-tests-tempest",
            'Source Repo':
            "https://git.openstack.org/openstack/tripleo-common"
            "-tempest-plugin",
            'Source Sha':
            "f08b321392930b4255310b5aca8f704a32a79132",
            'Dist Repo':
            "https://github.com/rdo-packages/tripleo-common-tempest"
            "-plugin-distgit-git",
            'Dist Sha':
            "7ae014d193ad00ddb5007431665a0b3347c2c94b",
            "Status":
            "SUCCESS",
            "Last Success Timestamp":
            "1580861715",
            "Component":
            "tripleo",
            "Pkg NVR":
            "python-tripleo-common-tests-tempest-0.0.1-0.2020020500"
            "1526.f08b321.el8"
        }, {
            'Project':
            'openstack-tripleo-common',
            'Source Repo':
            "https://git.openstack.org/openstack/tripleo-common",
            'Source Sha':
            "163d4b3b4b211358512fa9ee7f49d9fb930ecd8f",
            'Dist Repo':
            "https://github.com/rdo-packages/tripleo-common-distgit"
            "-git",
            'Dist Sha':
            "22ed466781937e0506ad4afae0427338820c5601",
            "Status":
            "SUCCESS",
            "Last Success Timestamp":
            "1583519484",
            "Component":
            "tripleo",
            "Pkg NVR":
            "openstack-tripleo-common-12.1.1-0.20200306183249"
            ".163d4b3"
            ".el8"
        }]

        # Create containers files
        # Container lists live under <temp_dir>/<Source Sha>/<list dirname>
        containers_file_dirname = os.path.dirname(
            config_defaults['containers_list_path'])
        containers_dir = os.path.join(self.temp_dir,
                                      self.versions_csv_rows[1]['Source Sha'],
                                      containers_file_dirname)

        # containers names coming from tripleo yaml file
        tripleo_containers_list = """
container_images:
- image_source: tripleo
  imagename: quay.io/tripleomaster/openstack-base:current-tripleo
- image_source: tripleo
  imagename: quay.io/tripleomaster/openstack-os:current-tripleo
- image_source: tripleo
  imagename: quay.io/tripleomaster/openstack-aodh-base:current-tripleo
"""
        os.makedirs(containers_dir)
        containers_file_path = \
            os.path.join(containers_dir,
                         os.path.basename(config_defaults[
                                              'containers_list_path']))
        with open(containers_file_path, "w") as containers_file:
            containers_file.write(tripleo_containers_list)

        # containers names coming from overcloud_containers.yaml file
        # (kolla build method; includes one name matching the exclude list)
        overcloud_containers_list = """
container_images:
- image_source: kolla
  imagename: quay.io/tripleotrain/centos-binary-nova-api:current-tripleo
- image_source: kolla
  imagename: quay.io/tripleotrain/centos-binary-neutron-server:current-tripleo
- image_source: kolla
  imagename: quay.io/tripleotrain/centos-binary-excluded:current-tripleo
- image_source: kolla
  imagename: quay.io/tripleotrain/centos-binary-ovn-controller:current-tripleo
"""
        overcloud_containers_file_path = \
            os.path.join(containers_dir, 'overcloud_containers.yaml')
        with open(overcloud_containers_file_path, "w") as containers_file:
            containers_file.write(overcloud_containers_list)

        # containers names coming from yaml file for ussuri release
        tripleo_containers_list = """
container_images:
- image_source: tripleo
  imagename: quay.io/tripleou/centos-binary-base:current-tripleo
- image_source: tripleo
  imagename: quay.io/tripleou/centos-binary-os:current-tripleo
- image_source: tripleo
  imagename: quay.io/tripleou/centos-binary-aodh-base:current-tripleo
"""
        tripleo_containers_file_path = \
            os.path.join(containers_dir, 'ussuri_containers.yaml')
        with open(tripleo_containers_file_path, "w") as containers_file:
            containers_file.write(tripleo_containers_list)

        # containers names coming from yaml file for queens release
        # (no image_source key in the queens format)
        tripleo_containers_list = """
container_images:
- imagename: quay.io/tripleoqueens/centos-binary-base:current-tripleo
- imagename: quay.io/tripleoqueens/centos-binary-os:current-tripleo
- imagename: quay.io/tripleoqueens/centos-binary-aodh-base:current-tripleo
"""
        overcloud_containers_file_path = \
            os.path.join(containers_dir, 'queens_containers.yaml')
        with open(overcloud_containers_file_path, "w") as containers_file:
            containers_file.write(tripleo_containers_list)

        # create exclude config

        excluded_containers = ['nonexisting', 'excluded']
        exclude_config = {
            'exclude_containers': {
                'master': excluded_containers,
                'train': excluded_containers,
            },
        }
        with open(containers_list_exclude_config_path, "w") as exclude_file:
            exclude_file.write(yaml.safe_dump(exclude_config))

        # Create empty containers file (under a sha with no containers)
        empty_containers_dir = os.path.join(self.temp_dir, "abc",
                                            containers_file_dirname)
        os.makedirs(empty_containers_dir)
        empty_containers_file_path = \
            os.path.join(empty_containers_dir,
                         os.path.basename(config_defaults[
                                              'containers_list_path']))
        with open(empty_containers_file_path, "w") as containers_file:
            pass

        # Create versions.csv files
        for dlrn_hash in self.hashes:
            dlrn_hash.label = "tripleo-ci-testing"
            versions_csv_dir = os.path.join(self.temp_dir,
                                            dlrn_hash.commit_dir)
            os.makedirs(versions_csv_dir)
            versions_csv_path = os.path.join(versions_csv_dir, "versions.csv")
            with open(versions_csv_path, "w") as versions_csv_file:
                csv_writer = csv.DictWriter(versions_csv_file,
                                            fieldnames=fieldnames)
                csv_writer.writeheader()
                for row in self.versions_csv_rows:
                    csv_writer.writerow(row)