def test_virtual_builder(self):
    """Test the virtual builder API.

    Steps:
        - Start a cluster with 01 frontend and 01 backend.
        - Force a job.
        - Wait for the build to end.
        - Ensure we can use the virtual_builder API.
        - Ensure the virtual_builder properties are well set.
        - Stop it.

    """
    with Cluster() as cluster:
        local_repo = cluster.clone()
        local_repo.push()
        cluster.api.force(branch=local_repo.branch)

        # Wait for the build to finish
        cluster.api.get_finished_build()

        premerge_build = cluster.api.get_finished_build('pre-merge')
        self.assertEqual(premerge_build['results'], SUCCESS)

        # Check build properties
        properties = cluster.api.get_build_properties(premerge_build)
        tags = [
            properties['git_host'][0],
            properties['git_owner'][0],
            properties['git_slug'][0],
            properties['stage_name'][0],
        ]
        self.assertEqual(properties['virtual_builder_name'][0], 'pre-merge')
        self.assertEqual(properties['virtual_builder_tags'][0], tags)
def test_local_job_empty(self):  # pylint: disable=no-self-use
    """Test local jobs with no job defined and an absolute path.

    (useful for people who want to store job files in /etc)

    Steps:
        - Configure local jobs in decorator.
        - Check Eve can start (no error in setup).
        - Verify directory (test setup validation).

    """
    cluster = Cluster()
    master = list(cluster._masters.values())[0]
    master.conf['LOCAL_JOBS_DIRPATH'] = '/dev/null'
    with cluster:
        cluster.sanity_check()
def test_force_parametrized_build(self):
    """Test forced build with parameters.

    Steps:
        - Spawn cluster with a parametrized force build scheduler.
        - Force a build with 2 parameters out of 5.
        - Check that the parameters are taken into account by reading
          the step's stdio log.

    """
    conf = {'FORCE_BUILD_PARAM_COUNT': '5'}
    with Cluster(extra_conf=conf) as cluster:
        local_repo = cluster.clone()
        local_repo.push(yaml=SingleCommandYaml(
            'echo The %(prop:color)s %(prop:vehicule)s'))
        buildset = cluster.api.force(branch=local_repo.branch,
                                     prop00_name='vehicule',
                                     prop00_value='submarine',
                                     prop01_name='color',
                                     prop01_value='yellow')

        self.assertEqual(buildset.result, 'success')
        child_build = buildset.buildrequest.build.children[
            0].buildrequest.build
        step = child_build.steps[-1]
        self.assertIn('The yellow submarine', step.rawlog('stdio'))
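# Note on the force API parameters used above (inferred from the call, not a
# documented guarantee): each extra build property is passed as a
# (propNN_name, propNN_value) pair, and FORCE_BUILD_PARAM_COUNT bounds how
# many such pairs the force scheduler exposes. Under that assumption, a third
# parameter would presumably be passed as:
#
#   cluster.api.force(branch=local_repo.branch,
#                     prop02_name='size', prop02_value='huge')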
def test_patcher_branch_match(self):
    """Test that a branch skip is taken into account."""
    PATCHER_DATA = {
        'skip_branches': [
            'spam',
            'egg',
        ],
    }
    conf = {'PATCHER_FILE_PATH': 'patcher.yml'}
    with Cluster(extra_conf=conf) as cluster:
        for master in cluster._masters.values():
            master.add_conf_file(yaml_data=PATCHER_DATA,
                                 filename=os.path.join(
                                     master._base_path, 'patcher.yml'))

        repo = cluster.clone()

        repo.push(branch='spam-branch', yaml=SingleCommandYaml('exit 0'))
        cluster.api.webhook(repo)
        cluster.api.getw('/buildsets', get_params={
            'limit': 1,
            'results': CANCELLED,
        })

        buildset = cluster.api.force(branch=repo.branch)
        self.assertEqual(buildset.result, 'success')

        cluster.sanity_check()
class TestDistro(unittest.TestCase):
    def setUp(self):
        # Spawn a cluster with a backend to ensure this feature works
        # with multiMaster
        self.cluster = Cluster(backends=1).start()
        self.local_repo = self.cluster.clone()

    def tearDown(self):
        self.cluster.stop()
        del self.cluster
        del self.local_repo

    def test_distro_implicit_call(self):
        """Test SetWorkerDistro, ensuring the step is implicitly called."""
        steps = [{'ShellCommand': {'command': 'exit 0'}}]
        branch = {'default': {'stage': 'pre-merge'}}
        stage = {'pre-merge': {
            'worker': {'type': 'local'},
            'steps': steps,
        }}
        self.local_repo.push(yaml=YamlFactory(branches=branch, stages=stage))
        buildset = self.cluster.api.force(branch=self.local_repo.branch)
        build = buildset.buildrequest.build
        build.wait_for_finish()
        premerge = build.children[0].buildrequest.build
        # Just ensure that the properties exist and that the default
        # value has been overwritten
        assert premerge.properties['distribution_id'][0] != 'unknown'
        assert premerge.properties['distribution_version_id'][0] != 'unknown'

    def test_distro_non_existent_os_release_file(self):
        """Test SetWorkerDistro when the os release file does not exist."""
        steps = [{'SetWorkerDistro': {'osReleaseFilePath': '/does/not/exist'}}]
        branch = {'default': {'stage': 'pre-merge'}}
        stage = {'pre-merge': {
            'worker': {'type': 'local'},
            'steps': steps,
        }}
        self.local_repo.push(yaml=YamlFactory(branches=branch, stages=stage))
        buildset = self.cluster.api.force(branch=self.local_repo.branch)
        build = buildset.buildrequest.build
        build.wait_for_finish()
        premerge = build.children[0].buildrequest.build
        # Defaults to 'unknown' when the os release file does not exist
        assert premerge.properties['distribution_id'][0] == 'unknown'
        assert premerge.properties['distribution_version_id'][0] == 'unknown'
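# Background note (an assumption, not confirmed by this file): SetWorkerDistro
# presumably reads an os-release style file on the worker (the standard
# /etc/os-release format, overridable via osReleaseFilePath), for instance:
#
#   ID=ubuntu
#   VERSION_ID="20.04"
#
# which would yield distribution_id == 'ubuntu' and
# distribution_version_id == '20.04' in the tests above.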
def configure_local_jobs(self, master_ids=(0, )):
    """Test a local job on the frontend.

    The local directory is customized with a subdirectory.

    Steps:
        - Configure local job in decorator.
        - Check Eve can start (no error in setup).
        - Verify directories and files (test setup validation).
        - Check schedulers and builders are correct.

    """
    cluster = Cluster()
    for master_id in master_ids:
        master = list(cluster._masters.values())[master_id]
        master.add_conf_file(yaml_data=PERIODIC_LOCAL_JOB,
                             filename='local2/sub/periodic.yml')
        master.conf['LOCAL_JOBS_DIRPATH'] = 'local2/sub'
        path = os.path.join(master._base_path, 'local2/sub/periodic.yml')
        self.assertTrue(os.path.isfile(path))

    with cluster:
        cluster.sanity_check()
        scheduler = cluster.api.get_scheduler(
            PERIODIC_LOCAL_JOB['scheduler']['name'])
        self.assertTrue(scheduler['enabled'])
        builder = cluster.api.get_builder(
            PERIODIC_LOCAL_JOB['builder']['name'])
        self.assertEqual(builder['description'],
                         PERIODIC_LOCAL_JOB['builder']['description'])

        # let the job trigger at least once
        buildset = cluster.api.getw(
            '/buildsets',
            get_params={
                'limit': 1,
                'results': 0,  # SUCCESS
            })
        self.assertEqual(
            buildset['reason'],
            "The Periodic scheduler named "
            "'my-periodic-scheduler' triggered this build")
def test1_cluster_start_stop(self):
    """Test cluster start and stop.

    Steps:
        - Start a cluster with 01 frontend and 01 backend.
        - Check that there are no errors in logs.
        - Stop it.

    """
    with Cluster() as cluster:
        cluster.sanity_check()
def test_nightly_build(self):
    """Test that a nightly build is correctly registered, without launching it."""
    cluster = Cluster()
    master = list(cluster._masters.values())[0]
    master.add_conf_file(yaml_data=NIGHTLY_LOCAL_JOB,
                         filename='local2/sub/nightly.yml')
    master.conf['LOCAL_JOBS_DIRPATH'] = 'local2/sub'
    path = os.path.join(master._base_path, 'local2/sub/nightly.yml')
    self.assertTrue(os.path.isfile(path))

    with cluster:
        cluster.sanity_check()
        scheduler = cluster.api.get_scheduler(
            NIGHTLY_LOCAL_JOB['scheduler']['name'])
        self.assertTrue(scheduler['enabled'])
        builder = cluster.api.get_builder(
            NIGHTLY_LOCAL_JOB['builder']['name'])
        self.assertEqual(builder['description'],
                         NIGHTLY_LOCAL_JOB['builder']['description'])
def test_step_maxtime_too_long(self):
    """Test a configurable step maxtime set higher than the allowed maximum."""
    conf = {'MAX_STEP_DURATION': '1400'}
    with Cluster(extra_conf=conf) as cluster:
        local_repo = cluster.clone()
        local_repo.push(yaml=PreMerge(steps=[{
            'ShellCommand': {
                'command': 'echo bad',
                'maxtime': 10000,
            }
        }]))
        buildset = cluster.api.force(branch=local_repo.branch)
        self.assertEqual(buildset.result, 'exception')
def test_step_timeout_for_real(self):
    """Test short step timeouts."""
    conf = {'MAX_STEP_DURATION': '1400'}
    with Cluster(extra_conf=conf) as cluster:
        local_repo = cluster.clone()
        local_repo.push(yaml=PreMerge(steps=[{
            'ShellCommand': {
                'command': 'sleep 10',
                'timeout': '1',
            }
        }]))
        buildset = cluster.api.force(branch=local_repo.branch)
        self.assertEqual(buildset.result, 'failure')
def test_step_timeout_invalid_type(self):
    """Test configurable step timeouts with an invalid value."""
    conf = {'MAX_STEP_DURATION': '1400'}
    with Cluster(extra_conf=conf) as cluster:
        local_repo = cluster.clone()
        local_repo.push(yaml=PreMerge(steps=[{
            'ShellCommand': {
                'command': 'echo bad',
                'timeout': 'a string',
            }
        }]))
        buildset = cluster.api.force(branch=local_repo.branch)
        self.assertEqual(buildset.result, 'exception')
def test2_bigger_cluster_start_stop(self):
    """Test addition of extra masters to a cluster.

    Steps:
        - Start a cluster with 01 frontend and 01 backend.
        - Add a frontend.
        - Add a backend.
        - Check that there are no errors in logs.
        - Stop it.

    """
    with Cluster() as cluster:
        cluster.add_master('frontend').start()
        cluster.add_master('backend').start()
        cluster.sanity_check()
def test_force_stage_build(self):
    """Test forced build with a forced stage.

    Steps:
        - Spawn cluster with a force build scheduler.
        - Force a build with a given stage name.
        - Check that the correct stage is triggered.

    """
    with Cluster() as cluster:
        local_repo = cluster.clone()
        local_repo.push(yaml=YamlFactory(
            branches={
                'default': {'stage': 'default_stage'},
            },
            stages={
                'default_stage': {
                    'worker': {'type': 'local'},
                    'steps': [{'ShellCommand': {'command': 'exit 1'}}],
                },
                'another_stage': {
                    'worker': {'type': 'local'},
                    'steps': [{'ShellCommand': {'command': 'echo "egg"'}}],
                },
            }))
        buildset = cluster.api.force(branch=local_repo.branch,
                                     force_stage='another_stage')
        self.assertEqual(buildset.result, 'success')
        child_build = buildset.buildrequest.build.children[
            0].buildrequest.build
        step = child_build.steps[-1]
        self.assertIn('egg', step.rawlog('stdio'))
def test_janitor(self):
    """Test a cluster with the Janitor configurator.

    Steps:
        - Configure the Janitor to build every day and every minute.
        - Wait for the Janitor to do its job.
        - Ensure the build succeeded.

    """
    conf = {
        'JANITOR_DAY': '*',
        'JANITOR_HOUR': '*',
        'JANITOR_MINUTE': '*',
        'JANITOR_DAY_RETENTION': '1'
    }
    with Cluster(extra_conf=conf) as cluster:
        build = cluster.api.get_finished_build(JANITOR_NAME, timeout=120)
        self.assertEqual(build['results'], SUCCESS)
def test_cancel_non_tip_build(self):
    """Check that builds for commits that are not on the branch tip are cancelled.

    Steps:
        - commit twice on a branch
        - send a webhook to notify the first commit
        - verify that a build is launched and cancelled immediately

    """
    with Cluster() as cluster:
        repo = cluster.clone()
        repo.push(branch='spam', yaml=SingleCommandYaml('exit 0'))
        old_revision = repo.revision
        repo.push(branch='spam', yaml=SingleCommandYaml('exit 1'))
        cluster.webhook(repo, old_revision)

        build = cluster.api.get_finished_build()
        self.assertEqual(build['results'], CANCELLED)
def test_step_maxtime_property(self):
    """Test the configurable step maxtime property."""
    conf = {'MAX_STEP_DURATION': '9128'}
    with Cluster(extra_conf=conf) as cluster:
        local_repo = cluster.clone()
        local_repo.push(yaml=PreMerge(steps=[{
            'ShellCommand': {
                'command': 'echo good',
                'maxTime': '%(prop:max_step_duration)s',
            }
        }]))
        buildset = cluster.api.force(branch=local_repo.branch)
        self.assertEqual(buildset.result, 'success')
        build = buildset.buildrequest.build
        child = build.children[0].buildrequest.build
        self.assertEqual(child.result, 'success')
        self.assertEqual(child.properties['max_step_duration'][0], 9128)
def setUpClass(cls):
    """Start a cluster including a local Docker registry."""
    cls.cluster = Cluster().start()
    cls.local_repo = cls.cluster.clone()

    steps = [{'ShellCommand': {'command': 'exit 0'}}]
    branch = {'default': {'stage': 'pre-merge'}}
    stage = {'pre-merge': {
        'worker': {'type': 'local'},
        'steps': steps,
    }}
    cls.local_repo.push(yaml=YamlFactory(branches=branch, stages=stage))
    buildset = cls.cluster.api.force(branch=cls.local_repo.branch)
    build = buildset.buildrequest.build
    build.wait_for_finish()
    cls.premerge = build.children[0].buildrequest.build
    cls.premerge_builder = cls.cluster.api.get(
        '/builders', {'name': 'pre-merge'})[0]
    cls.exp_builderid = cls.premerge_builder.get('builderid')
def test_worker_environ(self):
    """Test the worker environment.

    Steps:
        - Spawn worker.
        - Check that Eve environment variables are not set in the worker.

    """
    conf = {'FOO': 'bar'}
    with Cluster(extra_conf=conf) as cluster:
        local_repo = cluster.clone()
        local_repo.push(yaml=SingleCommandYaml('test -z "$FOO"'))
        buildset = cluster.api.force(branch=local_repo.branch)
        self.assertEqual(buildset.result, 'failure')

        child_build = \
            buildset.buildrequest.build.children[0].buildrequest.build
        self.assertEqual(child_build.first_failing_step.name, 'shell')
        self.assertEqual(child_build.first_failing_step.state_string,
                         "'test -z ...' (failure)")
def test_patcher_stage_match(self):
    """Test that a stage skip is taken into account."""
    PATCHER_DATA = {
        'skip_stages': [
            'pre-merge',
        ],
    }
    conf = {'PATCHER_FILE_PATH': 'patcher.yml'}
    with Cluster(extra_conf=conf) as cluster:
        for master in cluster._masters.values():
            master.add_conf_file(yaml_data=PATCHER_DATA,
                                 filename=os.path.join(
                                     master._base_path, 'patcher.yml'))

        repo = cluster.clone()

        repo.push(branch='other-branch', yaml=PreMerge(steps=[
            {
                'ShellCommand': {
                    'name': 'step1',
                    'command': 'exit 0'
                }
            },
        ]))
        cluster.api.webhook(repo)
        cluster.api.getw('/buildsets', get_params={
            'limit': 1,
            'results': CANCELLED,
        })

        buildset = cluster.api.force(branch=repo.branch)
        self.assertEqual(buildset.result, 'success')

        cluster.sanity_check()
def test_index_lock_failure(self):
    """Test a simple build failure on a cluster due to index.lock.

    Steps:
        - Start a cluster with 01 frontend and 01 backend.
        - Force a job.
        - Check that all the expected steps are there.
        - Stop it.

    """
    conf = {'MAX_LOCAL_WORKERS': '1'}
    with Cluster(extra_conf=conf) as cluster:
        local_repo = cluster.clone()
        local_repo.push()

        for master_name in cluster._masters:
            if master_name.startswith('backend'):
                base_path = cluster._masters[master_name]._base_path
                git_path = ('{}/workers/lw000-test_suffix/bootstrap/'
                            'build/.git'.format(base_path))
                local_repo.cmd('mkdir -p {}'.format(git_path))
                local_repo.cmd('touch {}/index.lock'.format(git_path))

        cluster.api.force(branch=local_repo.branch)

        # Check bootstrap
        bootstrap_build = cluster.api.get_finished_build()
        self.assertEqual(bootstrap_build['results'], FAILURE)
        bootstrap_steps = cluster.api.get_build_steps(bootstrap_build)
        step_names_and_descriptions = [(step['name'], step['state_string'])
                                       for step in bootstrap_steps]
        self.assertEqual(
            step_names_and_descriptions,
            [(u'worker_preparation', u'worker ready'),
             (u'set the bootstrap build number', u'Set'),
             (u'check index.lock', u"'test ! ...' (failure)")])
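# Background note (an assumption about the mechanism exercised above): the
# bootstrap's 'check index.lock' step appears to run something like
# ``test ! -f <build>/.git/index.lock``, because a stale lock file would make
# any later git operation on the work tree fail, e.g.:
#
#   $ touch .git/index.lock
#   $ git checkout .
#   fatal: Unable to create '.../.git/index.lock': File exists.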
def test_patcher_branch_no_match(self):
    """Test that a step skip is taken into account."""
    PATCHER_DATA = {
        'skip_branches': [
            'spam',
            'egg',
        ],
        'skip_steps': [
            'step1',
            'step3',
        ],
        'skip_stages': [
            'bacon',
        ],
    }
    conf = {'PATCHER_FILE_PATH': 'patcher.yml'}
    with Cluster(extra_conf=conf) as cluster:
        for master in cluster._masters.values():
            master.add_conf_file(yaml_data=PATCHER_DATA,
                                 filename=os.path.join(
                                     master._base_path, 'patcher.yml'))

        repo = cluster.clone()

        repo.push(branch='other-branch', yaml=PreMerge(steps=[
            {
                'ShellCommand': {
                    'name': 'step1',
                    'command': 'exit 0'
                }
            },
            {
                'ShellCommand': {
                    'name': 'step2',
                    'command': 'exit 0'
                }
            },
            {
                'ShellCommand': {
                    'name': 'step3',
                    'command': 'exit 0'
                }
            },
        ]))
        cluster.api.webhook(repo)
        cluster.api.getw('/buildsets', get_params={
            'limit': 1,
            'results': SUCCESS,
        })

        buildset = cluster.api.force(branch=repo.branch)
        self.assertEqual(buildset.result, 'success')

        # Check pre-merge
        premerge_build = cluster.api.get_finished_build('pre-merge')
        premerge_steps = cluster.api.get_build_steps(premerge_build)
        step_names_and_descriptions = [(step['name'], step['state_string'])
                                       for step in premerge_steps]
        self.assertEqual(
            step_names_and_descriptions,
            [(u'worker_preparation', u'worker ready'),
             (u'prevent unuseful restarts', u"'[ $(expr ...'"),
             (u'set the artifacts private url',
              u"property 'artifacts_private_url' set"),
             (u'Check worker OS distribution', u'finished'),
             (u'Set the current builder id', u'finished'),
             (u'Set the current build url', u'finished'),
             (u'extract steps from yaml', u'finished'),
             (u'step1', u'Temporarily disabled (skipped)'),
             (u'step2', u"'exit 0'"),
             (u'step3', u'Temporarily disabled (skipped)')])

        cluster.sanity_check()
def setUpClass(cls):
    conf = {
        'ARTIFACTS_PREFIX': 'aprefix-',
        'ARTIFACTS_PUBLIC_URL': 'https://foo.bar.baz'
    }
    cls.cluster = Cluster(extra_conf=conf).start()
def setUp(self):
    # Setting two backends to ensure this feature works with multiMaster
    self.cluster = Cluster(backends=2).start()
    self.local_repo = self.cluster.clone()
class TestCanStartBuild(unittest.TestCase):
    def setUp(self):
        # Setting two backends to ensure this feature works with multiMaster
        self.cluster = Cluster(backends=2).start()
        self.local_repo = self.cluster.clone()

    def tearDown(self):
        self.cluster.stop()
        del self.cluster
        del self.local_repo

    def test_can_start_one_build(self):
        """Check behaviour with one simultaneous build."""
        file_path = os.path.join(mkdtemp(), 'file')
        command = """
        test ! -f {file_path} &&
        touch {file_path} &&
        test -f {file_path} &&
        sleep 5 &&
        rm {file_path}
        """.format(file_path=file_path)
        steps = [{'ShellCommand': {'command': command}}]
        branch = {'default': {'stage': 'pre-merge'}}
        stage = {
            'pre-merge': {
                'worker': {'type': 'local'},
                'steps': steps,
                'simultaneous_builds': 1
            }
        }
        self.local_repo.push(yaml=YamlFactory(branches=branch, stages=stage))
        for _ in range(8):
            self.cluster.api.force(branch=self.local_repo.branch)

        builds = self.cluster.api.get_builds()
        while len(builds) != 8:
            builds = self.cluster.api.get_builds()

        buildids = [b['buildid'] for b in builds]
        for buildid in buildids:
            build = self.cluster.api.get_build_for_id(buildid)
            build.wait_for_finish()
            self.assertEqual(build.result, 'success')

    def test_can_start_few_builds(self):
        """Check the can-start-build behaviour with N simultaneous builds."""
        def get_running_builds(builds):
            return [
                build for build in builds
                if 'complete' not in build or build['complete'] is False
            ]

        total_builds = 8
        simultaneous_builds = 4
        command = "echo Ola Mundo!"
        steps = [{'ShellCommand': {'command': command}}]
        branch = {'default': {'stage': 'pre-merge'}}
        stage = {
            'pre-merge': {
                'worker': {'type': 'local'},
                'steps': steps,
                'simultaneous_builds': simultaneous_builds
            }
        }
        self.local_repo.push(yaml=YamlFactory(branches=branch, stages=stage))
        for _ in range(total_builds):
            self.cluster.api.force(branch=self.local_repo.branch)

        bootstrap = self.cluster.api.get_builds()

        # Wait for the pre-merge builder to be created
        for sleep in range(10):
            pre_merge = self.cluster.api.get('/builders',
                                             {'name': 'pre-merge'})
            if sleep == 9 and not pre_merge:
                self.fail('pre-merge builder was not created')
            elif not pre_merge:
                time.sleep(sleep)
            else:
                pre_merge = get_running_builds(pre_merge)
                break

        while any(pre_merge) or any(bootstrap):
            assert len(pre_merge) <= simultaneous_builds
            pre_merge = get_running_builds(
                self.cluster.api.get_builds(builder='pre-merge'))
            bootstrap = get_running_builds(self.cluster.api.get_builds())
            # Just so that we don't spam the API
            time.sleep(0.5)

        # Retrieve all builds to check that they all ran successfully
        pre_merge = self.cluster.api.get_builds(builder='pre-merge')
        self.assertEqual(len(pre_merge), total_builds)
        for build in pre_merge:
            self.assertEqual(build['results'], SUCCESS)

    def test_can_start_build_error_handling(self):
        """Ensure string parameters are ignored."""
        steps = [{'ShellCommand': {'command': 'exit 0'}}]
        branch = {'default': {'stage': 'pre-merge'}}
        stage = {
            'pre-merge': {
                'worker': {'type': 'local'},
                'steps': steps,
                'simultaneous_builds': "string"
            }
        }
        self.local_repo.push(yaml=YamlFactory(branches=branch, stages=stage))
        buildset = self.cluster.api.force(branch=self.local_repo.branch)
        buildset.wait_for_finish()
        self.assertEqual(buildset.result, 'success')
class TestPublishCodeCoverage(unittest.TestCase):
    """Test the code coverage report publication step.

    ``PublishCodeCoverage`` is the generic buildbot step used to publish
    one or more code coverage reports to an external service. At the
    moment, only the ``codecov.io`` service is supported.

    These tests have two operating modes:
      - The first uses an internal mock ``codecov.io`` server.
      - The second uses the real ``codecov.io`` service.

    By default, the automatic tests use the mock ``codecov.io`` server to
    avoid depending on external services that may become unavailable in
    the future.

    To use the real ``codecov.io`` server, we need to define the
    environment variable **CODECOV_IO_UPLOAD_TOKEN**, which is the
    ``codecov.io`` upload token of the repository given in the
    *yaml/generate_coverage_report/main.yml* YAML file.
    """

    def __init__(self, *args, **kwargs):
        super(TestPublishCodeCoverage, self).__init__(*args, **kwargs)

        self.codecov_io_server = None

    def setUp(self):
        """Instantiate a ``codecov.io`` mock HTTP server if needed.

        We define the **CODECOV_IO_BASE_URL** environment variable to
        communicate with our mock HTTP server rather than the real
        ``codecov.io`` server (see `~master/steps/publish_coverage_report`).

        We define the **CODECOV_IO_UPLOAD_TOKEN** environment variable to
        avoid skipping the step, which is the default behaviour when this
        variable is not given to Eve.
        """
        if not os.environ.get('CODECOV_IO_BASE_URL', None):
            self.codecov_io_server = CodecovIOMockServer()
            self.codecov_io_server.start()

        # The mock server is only instantiated when CODECOV_IO_BASE_URL is
        # unset, so only read its url in that case.
        conf = {
            'CODECOV_IO_BASE_URL': os.environ.get(
                'CODECOV_IO_BASE_URL',
                self.codecov_io_server.url
                if self.codecov_io_server else None),
            'CODECOV_IO_UPLOAD_TOKEN': os.environ.get(
                'CODECOV_IO_UPLOAD_TOKEN', 'FAKETOKEN'),
        }
        self.cluster = Cluster(extra_conf=conf)
        self.cluster.start()
        self.local_repo = self.cluster.clone()

        super(TestPublishCodeCoverage, self).setUp()

    def tearDown(self):
        """Stop the ``codecov.io`` mock HTTP server if needed.

        And restore old environment variables.
        """
        super(TestPublishCodeCoverage, self).tearDown()

        if self.codecov_io_server:
            self.codecov_io_server.stop()
            self.codecov_io_server = None

        self.cluster.stop()

    def test_codecovio_success(self):
        """Test PublishCoverageReport success.

        If we use the mock HTTP server, we ensure that we execute the
        requests with the correct query parameters and headers, in
        accordance with the ``codecov.io`` API
        (see https://docs.codecov.io/v4.3.0/reference#upload).
        """
        import xml.etree.cElementTree as ET

        coverage = ET.Element(
            'coverage', {
                'branch-rate': '0',
                'line-rate': '0',
                'timestamp': 'XXXXXXXXXXXXX',
                'version': '4.3.4',
            })

        sources = ET.SubElement(coverage, 'sources')
        ET.SubElement(sources, 'source').text = '/srv/test_codecov_io'

        packages = ET.SubElement(coverage, 'packages')
        package = ET.SubElement(packages, 'package', {
            'branch-rate': '0',
            'complexity': '0',
            'line-rate': '0',
            'name': '.',
        })
        classes = ET.SubElement(package, 'classes')
        class_ = ET.SubElement(
            classes, 'class', {
                'branch-rate': '0',
                'complexity': '0',
                'filename': 'test.py',
                'line-rate': '0',
                'name': 'test.py'
            })

        # Based on https://raw.githubusercontent.com/cobertura/web/
        # f0366e5e2cf18f111cbd61fc34ef720a6584ba02/htdocs/xml/coverage-03.dtd
        ET.SubElement(class_, 'methods')
        lines = ET.SubElement(class_, 'lines')
        for i in [3, 4, 5, 7, 8]:
            ET.SubElement(lines, 'line', {'hits': '0', 'number': str(i)})

        report_file = '/tmp/coverage.xml'
        tree = ET.ElementTree(coverage)
        tree.write(report_file, encoding='utf-8', xml_declaration=True)

        self.local_repo.push(yaml=PreMerge(steps=[{
            'SetProperty': {
                'name': 'set property my_revision',
                'property': 'my_revision',
                'value': '98f9379054719da6e7b5fd537b5a8e0ede096968',
            }
        }, {
            'PublishCoverageReport': {
                'repository': 'scality/test_codecov_io',
                'revision': '%(prop:my_revision)s',
                'filepaths': [report_file],
                'branch': 'master',
                'uploadName': 'ucheck',
                'configFile': '.codecov.yml',
            }
        }]))
        buildset = self.cluster.api.force(branch=self.local_repo.branch)
        self.assertEqual(buildset.result, 'success')

        build = buildset.buildrequest.build
        child_buildsets = build.children
        self.assertEqual(len(child_buildsets), 1)
        child_build = child_buildsets[0].buildrequest.build
        self.assertEqual(child_build.result, 'success')

        if self.codecov_io_server is None:
            return

        # The method build.getUrl() returns, for some reason, an URL with:
        # - the builder id of the main builder, not the virtual_builder id
        # - a buildid linked to the main builder as well, not to the
        #   virtual_build id
        self.codecov_io_server.assert_request_received_with(
            ('POST', '/upload/v4', {
                'commit': '98f9379054719da6e7b5fd537b5a8e0ede096968',
                'token': 'FAKETOKEN',
                'build': build.number,
                'build_url': '{0}#builders/{1}/builds/{2}'.format(
                    self.cluster.api.url,
                    child_build.builderid,
                    child_build.number),
                'service': 'buildbot',
                'branch': 'master',
                'name': 'ucheck',
                'slug': 'scality/test_codecov_io',
                'yaml': '.codecov.yml',
            }, {
                'Accept': 'text/plain',
                'Content-Length': '0',
            }),
            ('PUT', '/s3/fake_report.txt', {
                'AWSAccessKeyId': 'FAKEAWSACCESSKID',
                'Expires': str(self.codecov_io_server.expires),
                'Signature': 'FAKESIGNATURE',
            }, {
                'Content-Length': str(os.path.getsize(report_file)),
                'Content-Type': 'text/plain',
                'x-amz-acl': 'public-read',
                'x-amz-storage-class': 'REDUCED_REDUNDANCY',
            }))
def test_simple_success(self):
    """Test a simple build success on a cluster.

    Steps:
        - Start a cluster with 01 frontend and 01 backend.
        - Force a job.
        - Check that all the expected steps are there.
        - Stop it.

    """
    with Cluster() as cluster:
        local_repo = cluster.clone()
        local_repo.push()
        cluster.api.force(branch=local_repo.branch)

        # Check bootstrap
        bootstrap_build = cluster.api.get_finished_build()
        self.assertEqual(bootstrap_build['results'], SUCCESS)
        bootstrap_steps = cluster.api.get_build_steps(bootstrap_build)
        step_names_and_descriptions = [(step['name'], step['state_string'])
                                       for step in bootstrap_steps]
        self.assertEqual(
            step_names_and_descriptions,
            [(u'worker_preparation', u'worker ready'),
             (u'set the bootstrap build number', u'Set'),
             (u'check index.lock', u"'test ! ...'"),
             (u'checkout git branch', u'update'),
             (u'cancel builds for commits that are not branch tips',
              u'CancelNonTipBuild'),
             (u'set the master_builddir property', u'Set'),
             (u'get the product version',
              u"property 'product_version' set"),
             (u'check the product version', u"'echo 0.0.0 ...'"),
             (u'read eve/main.yml', u'uploading main.yml'),
             (u'get the commit short_revision',
              u"property 'commit_short_revision' set"),
             (u'get the commit timestamp',
              u"property 'commit_timestamp' set"),
             (u'set the artifacts name', u"property 'artifacts_name' set"),
             (u'set the artifacts public url',
              u"property 'artifacts_public_url' set"),
             (u'get the API version', u'Set'),
             (u'prepare 1 stage(s)', u'finished'),
             (u'trigger', u'triggered pre-merge')])

        # Check pre-merge
        premerge_build = cluster.api.get_finished_build('pre-merge')
        self.assertEqual(premerge_build['results'], SUCCESS)
        premerge_steps = cluster.api.get_build_steps(premerge_build)
        step_names_and_descriptions = [(step['name'], step['state_string'])
                                       for step in premerge_steps]
        self.assertEqual(
            step_names_and_descriptions,
            [(u'worker_preparation', u'worker ready'),
             (u'prevent unuseful restarts', u"'[ $(expr ...'"),
             (u'set the artifacts private url',
              u"property 'artifacts_private_url' set"),
             (u'Check worker OS distribution', u'finished'),
             (u'Set the current builder id', u'finished'),
             (u'Set the current build url', u'finished'),
             (u'extract steps from yaml', u'finished'),
             (u'shell', u"'exit 0'")])

        # Check build properties
        properties = cluster.api.getw(
            '/builds/{}'.format(bootstrap_build['buildid']),
            get_params={'property': '*'})

        from pprint import pprint
        pprint(properties)
        # TODO: imagine useful tests with build properties
        self.assertEqual(properties['properties']['stage_name'][0],
                         'bootstrap')
        self.assertEqual(properties['properties']['reason'][0],
                         'force build')
        self.assertEqual(properties['properties']['reason'][1],
                         'Force Build Form')
def setUpClass(cls):
    cls.cluster = Cluster().start()
    print(cls.cluster.api.url)
def test_init(self):
    ctx = Cluster()
    self.assertIsNotNone(ctx)
def test_bootstrap_and_master_properties(self):
    """Check the properties on the bootstrap build.

    Steps:
        - submit a build via webhook
        - verify that the build runs correctly
        - check the expected properties are set

    """
    with Cluster() as cluster:
        repo = cluster.clone()
        repo.push(branch='spam', yaml=SingleCommandYaml('exit 0'))
        cluster.webhook(repo, repo.revision)

        build = cluster.api.get_finished_build()
        self.assertEqual(build['results'], SUCCESS)
        properties = cluster.api.get_build_properties(build)

        def check_prop(name, value=None, source=None):
            self.assertTrue(name in properties)
            if value:
                self.assertEqual(properties[name][0], value)
            if source:
                self.assertEqual(properties[name][1], source)

        check_prop('artifacts_name')
        check_prop('artifacts_public_url')
        check_prop('bootstrap', 1, 'set the bootstrap build number')
        check_prop('branch', 'spam', 'Build')
        check_prop('buildbot_version', '2.7.0')
        check_prop('builddir')
        check_prop('buildername', 'bootstrap', 'Builder')
        check_prop('buildnumber', 1, 'Build')
        check_prop('commit_short_revision')
        check_prop('commit_timestamp')
        check_prop('conf_path')
        check_prop('eve_api_version')
        check_prop('git_host', 'mock', 'Builder')
        check_prop('git_owner', 'repo_owner', 'Builder')
        check_prop('git_slug', 'test', 'Builder')
        check_prop('got_revision', repo.revision, 'Git')
        check_prop('master_builddir')
        check_prop('max_step_duration', 14400, 'Builder')
        check_prop('product_version', '0.0.0')
        check_prop('project', 'TEST', 'Build')
        check_prop('reason', 'branch updated', 'Scheduler')
        check_prop('repository', 'http://www.example.com/', 'Build')
        check_prop('revision', repo.revision, 'Build')
        check_prop('scheduler', 'bootstrap-scheduler', 'Scheduler')
        check_prop('stage_name', 'bootstrap', 'Builder')
        check_prop('start_time')
        check_prop('workername')

        master_build = cluster.api.get_finished_build('pre-merge')
        properties = cluster.api.get_build_properties(master_build)

        check_prop('artifacts_name')
        check_prop('artifacts_public_url')
        check_prop('bootstrap', 1, 'set the bootstrap build number')
        check_prop('bootstrap_reason', 'branch updated', 'BuildOrder')
        check_prop('branch', 'spam', 'Build')
        check_prop('buildbot_version', '2.7.0')
        check_prop('builddir')
        check_prop('buildername', 'local-test_suffix', 'Builder')
        check_prop('buildnumber', 1, 'Build')
        check_prop('commit_short_revision')
        check_prop('commit_timestamp')
        check_prop('conf_path')
        check_prop('eve_api_version')
        check_prop('git_host', 'mock', 'Builder')
        check_prop('git_owner', 'repo_owner', 'Builder')
        check_prop('git_slug', 'test', 'Builder')
        check_prop('got_revision', repo.revision, 'Git')
        check_prop('master_builddir')
        check_prop('max_step_duration', 14400, 'Builder')
        check_prop('product_version', '0.0.0')
        check_prop('project', 'TEST', 'Build')
        check_prop('reason', 'pre-merge (triggered by bootstrap)')
        check_prop('repository', 'http://www.example.com/', 'Build')
        check_prop('revision', repo.revision, 'Build')
        check_prop('scheduler', 'local-test_suffix', 'Scheduler')
        check_prop('stage_name', 'pre-merge', 'BuildOrder')
        check_prop('start_time')
        check_prop('workername')