Example #1
    def expand(self, job, max_executors, test_stats_from=None):
        test_stats, avg_test_time = self.get_test_stats(test_stats_from
                                                        or self.project.slug)

        groups = shard(self.data['tests'],
                       max_executors,
                       test_stats,
                       avg_test_time,
                       normalize_object_name=self._normalize_test_segments)

        for weight, test_list in groups:
            future_command = FutureCommand(
                script=self.data['cmd'].format(test_names=' '.join(test_list)),
                path=self.data.get('path'),
                env=self.data.get('env'),
                artifacts=self.data.get('artifacts'),
            )
            # Normalize a missing or empty search path to None.
            artifact_search_path = self.data.get('artifact_search_path') or None
            future_jobstep = FutureJobStep(
                label=self.data.get('label') or future_command.label,
                commands=[future_command],
                data={
                    'weight': weight,
                    'tests': test_list,
                    'shard_count': len(groups),
                    'artifact_search_path': artifact_search_path,
                },
            )
            yield future_jobstep
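
The shard() helper used above is not shown here. As a rough, self-contained illustration of the weighting it implies (historical per-test times, an average for unknown tests, at most max_executors groups of (weight, test_list)), here is a greedy sharder; the name shard_sketch and its exact balancing strategy are assumptions, not the project's implementation.

def shard_sketch(tests, max_executors, test_stats, avg_test_time):
    # Greedy longest-job-first packing: the heaviest tests are placed first,
    # each into the currently lightest bucket, keeping shard weights balanced.
    buckets = [[0, []] for _ in range(max_executors)]
    for name in sorted(tests, key=lambda t: test_stats.get(t, avg_test_time),
                       reverse=True):
        bucket = min(buckets, key=lambda b: b[0])
        bucket[0] += test_stats.get(name, avg_test_time)
        bucket[1].append(name)
    # Return only populated shards, as (weight, test_list) pairs.
    return [(weight, names) for weight, names in buckets if names]

# Example: three tests, two executors, one unknown runtime filled by the average.
groups = shard_sketch(['a.test', 'b.test', 'c.test'], 2,
                      {'a.test': 30.0, 'b.test': 10.0}, 20.0)
# -> [(30.0, ['a.test']), (30.0, ['c.test', 'b.test'])]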
Example #2
    def expand(self, job, max_executors, **kwargs):
        for cmd_data in self.data['commands']:
            # TODO: group commands with jobsteps so as to respect max_executors
            future_command = FutureCommand(**cmd_data)
            future_jobstep = FutureJobStep(
                label=cmd_data.get('label') or future_command.label,
                commands=[future_command],
            )
            yield future_jobstep
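
The TODO above leaves max_executors unused: every command becomes its own jobstep. One hedged way to honor the limit would be to batch the command dicts into at most that many groups before building FutureJobSteps. The round-robin helper below only illustrates that idea; it is not code from the project.

def chunk_commands(commands, max_executors):
    # Distribute command dicts round-robin into at most max_executors groups;
    # each group would then back a single FutureJobStep.
    groups = [[] for _ in range(min(max_executors, len(commands)))]
    for i, cmd_data in enumerate(commands):
        groups[i % len(groups)].append(cmd_data)
    return groups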
Example #3
    def create_replacement_jobstep(self, step):
        if not step.data.get('expanded', False):
            return self._setup_jobstep(step.phase, step.job, replaces=step)
        future_commands = map(FutureCommand.from_command, step.commands)
        future_jobstep = FutureJobStep(step.label, commands=future_commands)
        # we skip adding setup and teardown commands because these will already
        # be present in the old, failed JobStep.
        new_jobstep = self.create_expanded_jobstep(step, step.phase, future_jobstep,
                                                   skip_setup_teardown=True)
        db.session.flush()
        step.replacement_id = new_jobstep.id
        db.session.add(step)
        db.session.commit()
        sync_job_step.delay_if_needed(
            step_id=new_jobstep.id.hex,
            task_id=new_jobstep.id.hex,
            parent_task_id=new_jobstep.job.id.hex,
        )
        return new_jobstep
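
FutureCommand.from_command is used above to clone a failed step's commands. A plausible reading, based only on fields asserted elsewhere in these examples (script, cwd, env, artifacts, label, and a label that defaults to the first line of the script), is a field-by-field copy like the stand-in below; the class name and constructor signature are assumptions, not the real FutureCommand.

class FutureCommandSketch(object):
    # Stand-in for FutureCommand, only to illustrate what from_command()
    # appears to do.
    def __init__(self, script, path=None, env=None, artifacts=None, label=None):
        self.script = script
        self.path = path
        self.env = env
        self.artifacts = artifacts
        # The tests above show labels defaulting to the script's first line.
        self.label = label or script.splitlines()[0]

    @classmethod
    def from_command(cls, command):
        # Copy the user-visible fields of an existing command row.
        return cls(script=command.script, path=command.cwd, env=command.env,
                   artifacts=command.artifacts, label=command.label)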
Example #4
    def test_create_replacement_jobstep_expanded_no_node(self, get_vcs):
        build = self.create_build(self.create_project())
        job = self.create_job(build)
        jobphase = self.create_jobphase(job, label='foo')
        jobstep = self.create_jobstep(jobphase)

        new_jobphase = self.create_jobphase(job, label='bar')

        vcs = mock.Mock(spec=Vcs)
        vcs.get_buildstep_clone.return_value = 'git clone https://example.com'
        get_vcs.return_value = vcs

        future_jobstep = FutureJobStep(
            label='test',
            commands=[
                FutureCommand('echo 1'),
                FutureCommand('echo "foo"\necho "bar"', path='subdir'),
            ],
            data={
                'weight': 1,
                'forceInfraFailure': True
            },
        )

        buildstep = self.get_buildstep(cluster='foo')
        fail_jobstep = buildstep.create_expanded_jobstep(
            jobstep, new_jobphase, future_jobstep)

        fail_jobstep.result = Result.infra_failed
        fail_jobstep.status = Status.finished
        fail_jobstep.node = None
        db.session.add(fail_jobstep)
        db.session.commit()

        new_jobstep = buildstep.create_replacement_jobstep(fail_jobstep)
        # new jobstep should still be part of same job/phase
        assert new_jobstep.job == job
        assert new_jobstep.phase == fail_jobstep.phase
        # make sure .steps actually includes the new jobstep
        assert len(fail_jobstep.phase.steps) == 2
        # make sure replacement id is correctly set
        assert fail_jobstep.replacement_id == new_jobstep.id
        assert new_jobstep.data.get('avoid_node') is None
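
This test and the next one together pin down the avoid_node behavior: the hint is recorded only when the failed jobstep actually ran on a node. A one-line sketch of that rule, for illustration only (not the project's code):

def avoid_node_hint(failed_step):
    # No node on the failed step (e.g. it was never scheduled) means no hint.
    return failed_step.node.label if failed_step.node is not None else None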
Example #5
    def test_create_replacement_jobstep_expanded(self, get_vcs):
        build = self.create_build(self.create_project())
        job = self.create_job(build)
        jobphase = self.create_jobphase(job, label='foo')
        jobstep = self.create_jobstep(jobphase)

        new_jobphase = self.create_jobphase(job, label='bar')

        vcs = mock.Mock(spec=Vcs)
        vcs.get_buildstep_clone.return_value = 'git clone https://example.com'
        get_vcs.return_value = vcs

        future_jobstep = FutureJobStep(
            label='test',
            commands=[
                FutureCommand('echo 1'),
                FutureCommand('echo "foo"\necho "bar"', path='subdir'),
            ],
            data={
                'weight': 1,
                'forceInfraFailure': True,
                'targets': ['//A:test', '//B:test']
            },
        )

        buildstep = self.get_buildstep(cluster='foo')
        fail_jobstep = buildstep.create_expanded_jobstep(
            jobstep, new_jobphase, future_jobstep)

        fail_jobstep.result = Result.infra_failed
        fail_jobstep.status = Status.finished
        fail_jobstep.node = self.create_node(label='ip-127-0-0-1')
        db.session.add(fail_jobstep)
        db.session.commit()

        new_jobstep = buildstep.create_replacement_jobstep(fail_jobstep)
        # new jobstep should still be part of same job/phase
        assert new_jobstep.job == job
        assert new_jobstep.phase == fail_jobstep.phase
        # make sure .steps actually includes the new jobstep
        assert len(fail_jobstep.phase.steps) == 2
        # make sure replacement id is correctly set
        assert fail_jobstep.replacement_id == new_jobstep.id
        assert new_jobstep.data['avoid_node'] == 'ip-127-0-0-1'

        # make sure targets are copied over
        assert len(new_jobstep.targets) == 2
        assert (set([t.name for t in new_jobstep.targets]) ==
                set(['//A:test', '//B:test']))

        # we want the replacement jobstep to have the same attributes the
        # original jobstep would be expected to after expand_jobstep()
        assert new_jobstep.data['expanded'] is True
        assert new_jobstep.data['weight'] == 1
        assert new_jobstep.cluster == 'foo'
        # make sure non-whitelisted attributes aren't copied over
        assert 'forceInfraFailure' not in new_jobstep.data

        commands = new_jobstep.commands

        assert len(commands) == 5

        idx = 0
        assert commands[idx].script == 'git clone https://example.com'
        assert commands[idx].cwd == ''
        assert commands[idx].type == CommandType.infra_setup
        assert commands[idx].artifacts == []
        assert commands[idx].env == DEFAULT_ENV
        assert commands[idx].order == idx

        # skip blacklist removal command
        idx += 1

        idx += 1
        assert commands[idx].script == 'echo "hello world 2"'
        assert commands[idx].cwd == '/usr/test/1'
        assert commands[idx].type == CommandType.setup
        assert tuple(commands[idx].artifacts) == ('artifact1.txt',
                                                  'artifact2.txt')
        assert commands[idx].env['PATH'] == '/usr/test/1'
        for k, v in DEFAULT_ENV.items():
            if k != 'PATH':
                assert commands[idx].env[k] == v
        assert commands[idx].order == idx

        idx += 1
        assert commands[idx].label == 'echo 1'
        assert commands[idx].script == 'echo 1'
        assert commands[idx].order == idx
        assert commands[idx].cwd == DEFAULT_PATH
        assert commands[idx].type == CommandType.default
        assert tuple(commands[idx].artifacts) == tuple(DEFAULT_ARTIFACTS)
        assert commands[idx].env == DEFAULT_ENV

        idx += 1
        assert commands[idx].label == 'echo "foo"'
        assert commands[idx].script == 'echo "foo"\necho "bar"'
        assert commands[idx].order == idx
        assert commands[idx].cwd == './source/subdir'
        assert commands[idx].type == CommandType.default
        assert tuple(commands[idx].artifacts) == tuple(DEFAULT_ARTIFACTS)
        assert commands[idx].env == DEFAULT_ENV
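
The assertions above (weight and targets survive, forceInfraFailure does not, expanded is forced to True, avoid_node records the failed node) suggest the replacement step copies only a whitelist of keys from the failed step's data. The helper below is a hedged sketch of that invariant; the whitelist contents and the function name are assumptions.

def replacement_data_sketch(failed_step_data, avoid_node=None):
    # Assumed whitelist, inferred from the assertions in these tests.
    whitelist = ('weight', 'targets', 'shard_count')
    data = {k: v for k, v in failed_step_data.items() if k in whitelist}
    data['expanded'] = True
    if avoid_node is not None:
        data['avoid_node'] = avoid_node
    return data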
Example #6
    def test_create_expanded_jobstep(self, get_vcs):
        build = self.create_build(self.create_project())
        job = self.create_job(build)
        jobphase = self.create_jobphase(job, label='foo')
        jobstep = self.create_jobstep(jobphase)

        new_jobphase = self.create_jobphase(job, label='bar')

        vcs = mock.Mock(spec=Vcs)
        vcs.get_buildstep_clone.return_value = 'git clone https://example.com'
        get_vcs.return_value = vcs

        future_jobstep = FutureJobStep(
            label='test',
            commands=[
                FutureCommand('echo 1'),
                FutureCommand('echo "foo"\necho "bar"', path='subdir'),
            ],
        )

        buildstep = self.get_buildstep(cluster='foo')
        with mock.patch.object(
                buildstep,
                '_create_targets_for_jobstep') as mock_create_targets:
            new_jobstep = buildstep.create_expanded_jobstep(
                jobstep, new_jobphase, future_jobstep)

        mock_create_targets.assert_called_once_with(new_jobstep)

        db.session.flush()

        assert new_jobstep.data['expanded'] is True
        assert new_jobstep.cluster == 'foo'

        commands = new_jobstep.commands

        assert len(commands) == 5

        idx = 0
        assert commands[idx].script == 'git clone https://example.com'
        assert commands[idx].cwd == ''
        assert commands[idx].type == CommandType.infra_setup
        assert commands[idx].artifacts == []
        assert commands[idx].env == DEFAULT_ENV
        assert commands[idx].order == idx

        # skip blacklist removal command
        idx += 1

        idx += 1
        assert commands[idx].script == 'echo "hello world 2"'
        assert commands[idx].cwd == '/usr/test/1'
        assert commands[idx].type == CommandType.setup
        assert tuple(commands[idx].artifacts) == ('artifact1.txt',
                                                  'artifact2.txt')
        assert commands[idx].env['PATH'] == '/usr/test/1'
        for k, v in DEFAULT_ENV.items():
            if k != 'PATH':
                assert commands[idx].env[k] == v
        assert commands[idx].order == idx

        idx += 1
        assert commands[idx].label == 'echo 1'
        assert commands[idx].script == 'echo 1'
        assert commands[idx].order == idx
        assert commands[idx].cwd == DEFAULT_PATH
        assert commands[idx].type == CommandType.default
        assert tuple(commands[idx].artifacts) == tuple(DEFAULT_ARTIFACTS)
        assert commands[idx].env == DEFAULT_ENV

        idx += 1
        assert commands[idx].label == 'echo "foo"'
        assert commands[idx].script == 'echo "foo"\necho "bar"'
        assert commands[idx].order == idx
        assert commands[idx].cwd == './source/subdir'
        assert commands[idx].type == CommandType.default
        assert tuple(commands[idx].artifacts) == tuple(DEFAULT_ARTIFACTS)
        assert commands[idx].env == DEFAULT_ENV
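
Both expansion tests check the same command layout: the VCS clone (infra_setup), a blacklist-removal command, the plan's setup command, and then the expanded commands, with each command's order equal to its index. The sketch below illustrates only that ordering invariant over plain dicts; it is not the buildstep's actual assembly code.

def assemble_commands_sketch(setup_commands, expanded_commands):
    # Setup/infra commands first, then the expanded commands, with `order`
    # stamped as each command's final position in the list.
    ordered = []
    for cmd in list(setup_commands) + list(expanded_commands):
        ordered.append(dict(cmd, order=len(ordered)))
    return ordered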
Example #7
    def expand(self, job, max_executors, test_stats_from=None):
        target_stats, avg_time = self.get_target_stats(test_stats_from
                                                       or self.project.slug)

        affected_targets = self.data['affected_targets']
        unaffected_targets = self.data['unaffected_targets']
        all_targets = affected_targets + unaffected_targets
        statsreporter.stats().set_gauge(
            '{}_bazel_affected_targets_count'.format(self.project.slug),
            len(affected_targets))
        statsreporter.stats().set_gauge(
            '{}_bazel_all_targets_count'.format(self.project.slug),
            len(all_targets))
        to_shard = all_targets

        # NOTE: a null selective testing policy implies `disabled`
        if job.build.selective_testing_policy is SelectiveTestingPolicy.enabled:
            to_shard = affected_targets
            for target in unaffected_targets:
                # TODO(naphat): should we check that the target exists in the parent revision?
                # Our collect-targets script should make it impossible for it not to exist.
                target_object = BazelTarget(
                    job=job,
                    name=target,
                    result_source=ResultSource.from_parent,
                )
                db.session.add(target_object)

        excluded_targets = self.data.get('excluded_targets')
        if excluded_targets:
            for target in excluded_targets:
                target_object = BazelTarget(
                    job=job,
                    name=target,
                    result=Result.skipped,
                    status=Status.finished,
                )
                db.session.add(target_object)
                target_message = BazelTargetMessage(
                    target=target_object,
                    text='This target was excluded by a tag.')
                db.session.add(target_message)

        messages = self.data.get('messages')
        if messages:
            for text in messages:
                message = BuildMessage(build_id=job.build_id, text=text)
                db.session.add(message)

        groups = shard(to_shard, max_executors, target_stats, avg_time)

        for weight, target_list in groups:
            future_command = FutureCommand(
                script=self.data['cmd'].format(
                    target_names=' '.join(target_list)),
                path=self.data.get('path'),
                env=self.data.get('env'),
                artifacts=self.data.get('artifacts'),
            )
            data = {
                'weight': weight,
                'targets': target_list,
                'shard_count': len(groups),
                'artifact_search_path': self.data['artifact_search_path'],
            }
            if 'dependency_map' in self.data:
                data['dependency_map'] = self.data['dependency_map']
            future_jobstep = FutureJobStep(
                label=self.data.get('label') or future_command.label,
                commands=[future_command],
                data=data,
            )
            yield future_jobstep
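
The policy branch above determines what actually gets sharded: everything by default, but only the affected targets when selective testing is enabled (unaffected targets are recorded with results inherited from the parent revision). A short restatement of that choice, for illustration only:

def targets_to_shard(affected, unaffected, selective_testing_enabled):
    # Unaffected targets are skipped only under an enabled policy; their
    # results come from the parent revision instead of a new run.
    if selective_testing_enabled:
        return list(affected)
    return list(affected) + list(unaffected)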
    def test_simple_expander(self, mock_get_expander,
                             mock_get_build_step_for_job):
        project = self.create_project()
        build = self.create_build(project)
        job = self.create_job(build)
        jobphase = self.create_jobphase(job)
        jobstep = self.create_jobstep(jobphase, data={
            'max_executors': 10,
        })
        plan = self.create_plan(project, label='test')
        self.create_step(plan)
        jobplan = self.create_job_plan(job, plan)
        command = self.create_command(jobstep,
                                      type=CommandType.collect_tests,
                                      status=Status.in_progress)

        def dummy_create_expanded_jobstep(jobstep, new_jobphase,
                                          future_jobstep):
            return future_jobstep.as_jobstep(new_jobphase)

        dummy_expander = Mock(spec=Expander)
        dummy_expander.expand.return_value = [
            FutureJobStep(
                label='test',
                commands=[
                    FutureCommand(script='echo 1'),
                    FutureCommand(script='echo "foo"\necho "bar"'),
                ],
            )
        ]
        dummy_expander.default_phase_name.return_value = 'dummy'
        mock_get_expander.return_value.return_value = dummy_expander
        mock_buildstep = Mock(spec=BuildStep)
        mock_buildstep.create_expanded_jobstep.side_effect = dummy_create_expanded_jobstep

        mock_get_build_step_for_job.return_value = jobplan, mock_buildstep

        path = '/api/0/commands/{0}/'.format(command.id.hex)

        # missing output
        resp = self.client.post(path, data={
            'status': 'finished',
        })
        assert resp.status_code == 400, resp.data

        mock_get_expander.reset_mock()

        # valid params
        resp = self.client.post(path,
                                data={
                                    'status': 'finished',
                                    'output': '{"foo": "bar"}',
                                })
        assert resp.status_code == 200, resp.data

        mock_get_expander.assert_called_once_with(command.type)
        mock_get_expander.return_value.assert_called_once_with(
            project=project,
            data={'foo': 'bar'},
        )
        dummy_expander.validate.assert_called_once_with()
        dummy_expander.expand.assert_called_once_with(
            job=job,
            max_executors=10,
            test_stats_from=mock_buildstep.get_test_stats_from.return_value)

        phase2 = JobPhase.query.filter(
            JobPhase.job_id == job.id,
            JobPhase.id != jobphase.id,
        ).first()
        assert phase2.label == 'dummy'
        assert phase2.status == Status.queued

        new_jobstep = phase2.current_steps[0]
        assert new_jobstep.label == 'test'
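
The test drives the command endpoint with a fully mocked expander, so the contract it exercises is easy to miss: an expander is constructed with project and data, must pass validate(), report a default_phase_name(), and have expand() yield future jobsteps. The toy below has that shape purely for illustration; it is not one of the project's Expander subclasses, and the dicts it yields stand in for real FutureJobStep objects.

class EchoExpanderSketch(object):
    # Toy expander with the same call surface the mocked dummy_expander has.
    def __init__(self, project, data):
        self.project = project
        self.data = data

    def validate(self):
        assert 'commands' in self.data, 'payload must contain a "commands" list'

    def default_phase_name(self):
        return 'echo'

    def expand(self, job, max_executors, **kwargs):
        for cmd in self.data['commands']:
            yield {'label': cmd.get('label') or cmd['script'],
                   'commands': [cmd]}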