Example #1
0
    def iter_all_commands(self, job):
        """Yield every FutureCommand needed to run *job*, in execution order.

        Order is: repository clone (plus patch application and any extra
        repo clones, when a VCS is available), then the blacklist-removal
        infra command, then the configured build commands.
        """
        vcs = job.source.repository.get_vcs()
        if vcs is not None:
            clone_script = vcs.get_buildstep_clone(
                job.source, self.repo_path, self.clean,
                self.debug_config.get('repo_cache_dir'))
            yield FutureCommand(
                script=clone_script,
                env=self.env,
                type=CommandType.infra_setup,
            )

            if job.source.patch:
                yield FutureCommand(
                    script=vcs.get_buildstep_patch(job.source, self.repo_path),
                    env=self.env,
                    type=CommandType.infra_setup,
                )

            # Clones for any additional repos configured via other_repos.
            for clone_command in self.other_repo_clone_commands:
                yield clone_command

        # Remove this project's config from the blacklist before running.
        blacklist_remove_path = os.path.join(self.custom_bin_path(),
                                             'blacklist-remove')
        yield FutureCommand(
            script=blacklist_remove_path + ' "' +
            job.project.get_config_path() + '"',
            path=self.repo_path,
            env=self.env,
            type=CommandType.infra_setup,
        )

        for command in self.commands:
            yield command
Example #2
0
    def expand(self, job, max_executors, test_stats_from=None):
        """Yield one FutureJobStep per shard of the collected tests.

        Test timing stats come from ``test_stats_from`` when given,
        otherwise from this project.
        """
        stats_project = test_stats_from or self.project.slug
        test_stats, avg_test_time = self.get_test_stats(stats_project)

        groups = shard(self.data['tests'],
                       max_executors,
                       test_stats,
                       avg_test_time,
                       normalize_object_name=self._normalize_test_segments)

        shard_count = len(groups)
        for weight, test_list in groups:
            command = FutureCommand(
                script=self.data['cmd'].format(test_names=' '.join(test_list)),
                path=self.data.get('path'),
                env=self.data.get('env'),
                artifacts=self.data.get('artifacts'),
            )
            # Normalize falsy values (e.g. '') to None.
            search_path = self.data.get('artifact_search_path') or None
            yield FutureJobStep(
                label=self.data.get('label') or command.label,
                commands=[command],
                data={
                    'weight': weight,
                    'tests': test_list,
                    'shard_count': shard_count,
                    'artifact_search_path': search_path,
                },
            )
Example #3
0
 def get_future_commands(self, env, commands, artifacts):
     """Create future commands which are later created as commands.
     See models/command.py.

     Args:
         env: environment dict applied to every command.
         commands: iterable of dicts, each with a 'script' key.
         artifacts: artifact spec attached to every command.

     Returns:
         list of FutureCommand, one per entry in ``commands``.
     """
     # A list comprehension is clearer than map+lambda and returns a
     # concrete list on both Python 2 and 3 (py3 `map` is lazy, which
     # would silently change the return type for callers expecting a list).
     return [FutureCommand(command['script'], artifacts=artifacts, env=env)
             for command in commands]
Example #4
0
 def expand(self, job, max_executors, **kwargs):
     """Yield a single-command FutureJobStep for each configured command."""
     # TODO: group commands with jobsteps so as to respect max_executors
     for cmd_data in self.data['commands']:
         command = FutureCommand(**cmd_data)
         yield FutureJobStep(
             label=cmd_data.get('label') or command.label,
             commands=[command],
         )
Example #5
0
    def test_create_replacement_jobstep_expanded_no_node(self, get_vcs):
        """Replacing a failed expanded jobstep that never ran on a node
        should not record an 'avoid_node' hint on the replacement."""
        build = self.create_build(self.create_project())
        job = self.create_job(build)
        jobphase = self.create_jobphase(job, label='foo')
        jobstep = self.create_jobstep(jobphase)

        new_jobphase = self.create_jobphase(job, label='bar')

        # Stub out VCS access so expansion can build a clone command.
        vcs = mock.Mock(spec=Vcs)
        vcs.get_buildstep_clone.return_value = 'git clone https://example.com'
        get_vcs.return_value = vcs

        future_jobstep = FutureJobStep(
            label='test',
            commands=[
                FutureCommand('echo 1'),
                FutureCommand('echo "foo"\necho "bar"', path='subdir'),
            ],
            data={
                'weight': 1,
                'forceInfraFailure': True
            },
        )

        buildstep = self.get_buildstep(cluster='foo')
        fail_jobstep = buildstep.create_expanded_jobstep(
            jobstep, new_jobphase, future_jobstep)

        # Simulate an infra failure on a step that was never assigned a node.
        fail_jobstep.result = Result.infra_failed
        fail_jobstep.status = Status.finished
        fail_jobstep.node = None
        db.session.add(fail_jobstep)
        db.session.commit()

        new_jobstep = buildstep.create_replacement_jobstep(fail_jobstep)
        # new jobstep should still be part of same job/phase
        assert new_jobstep.job == job
        assert new_jobstep.phase == fail_jobstep.phase
        # make sure .steps actually includes the new jobstep
        assert len(fail_jobstep.phase.steps) == 2
        # make sure replacement id is correctly set
        assert fail_jobstep.replacement_id == new_jobstep.id
        # failed step had no node, so there is nothing to avoid
        assert new_jobstep.data.get('avoid_node') is None
Example #6
0
    def _other_repo_clone_commands(self, other_repos):
        # type: (Optional[List[Dict[str, str]]]) -> List[FutureCommand]
        """
        Parses other_repos config and returns a list of FutureCommands
        that will clone said repos.

        Raises ValueError on malformed config (non-list, non-dict entry,
        missing 'repo'/'path', or a bare repo name with no base URL).
        """
        clone_commands = []  # type: List[FutureCommand]
        if other_repos is None:
            return clone_commands
        if not isinstance(other_repos, list):
            raise ValueError("other_repos must be a list!")

        for repo in other_repos:
            if not isinstance(repo, dict):
                raise ValueError('Each repo should be a dict')
            if not repo.get('repo'):
                raise ValueError("Each other_repo must specify a repo")
            if not repo.get('path'):
                raise ValueError("Each other_repo must specify a path")

            # Pick VCS class, default revision, and base URL; git is the
            # default backend unless "backend": "hg" is specified.
            if repo.get('backend') == 'hg':
                repo_vcs = MercurialVcs  # type: Type[Vcs]
                revision = repo.get('revision', 'default')
                base_url = current_app.config['MERCURIAL_DEFAULT_BASE_URI']
            else:
                repo_vcs = GitVcs
                revision = repo.get('revision', 'origin/master')
                base_url = current_app.config['GIT_DEFAULT_BASE_URI']

            # check if the repo is a full url already or just a repo name (like changes.git)
            repo_name = repo['repo']
            if '@' in repo_name or '://' in repo_name:
                remote_url = repo_name
            elif base_url:
                remote_url = base_url + repo_name
            else:
                raise ValueError(
                    "Repo %s is not a full URL but no base URL is configured" %
                    repo_name)

            clone_commands.append(
                FutureCommand(
                    script=repo_vcs.get_clone_command(
                        remote_url, repo['path'], revision, self.clean,
                        self.debug_config.get('repo_cache_dir')),
                    env=self.env,
                    type=CommandType.infra_setup,
                ))
        return clone_commands
Example #7
0
    def test_create_replacement_jobstep_expanded(self, get_vcs):
        """Replacing a failed expanded jobstep should clone its data
        (weight, targets, cluster) onto a new jobstep in the same phase,
        record the failed node as 'avoid_node', and rebuild the full
        command sequence (clone, blacklist removal, setup, user commands).
        """
        build = self.create_build(self.create_project())
        job = self.create_job(build)
        jobphase = self.create_jobphase(job, label='foo')
        jobstep = self.create_jobstep(jobphase)

        new_jobphase = self.create_jobphase(job, label='bar')

        # Stub out VCS access so expansion can build a clone command.
        vcs = mock.Mock(spec=Vcs)
        vcs.get_buildstep_clone.return_value = 'git clone https://example.com'
        get_vcs.return_value = vcs

        future_jobstep = FutureJobStep(
            label='test',
            commands=[
                FutureCommand('echo 1'),
                FutureCommand('echo "foo"\necho "bar"', path='subdir'),
            ],
            data={
                'weight': 1,
                'forceInfraFailure': True,
                'targets': ['//A:test', '//B:test']
            },
        )

        buildstep = self.get_buildstep(cluster='foo')
        fail_jobstep = buildstep.create_expanded_jobstep(
            jobstep, new_jobphase, future_jobstep)

        # Simulate an infra failure on a step that ran on a known node.
        fail_jobstep.result = Result.infra_failed
        fail_jobstep.status = Status.finished
        fail_jobstep.node = self.create_node(label='ip-127-0-0-1')
        db.session.add(fail_jobstep)
        db.session.commit()

        new_jobstep = buildstep.create_replacement_jobstep(fail_jobstep)
        # new jobstep should still be part of same job/phase
        assert new_jobstep.job == job
        assert new_jobstep.phase == fail_jobstep.phase
        # make sure .steps actually includes the new jobstep
        assert len(fail_jobstep.phase.steps) == 2
        # make sure replacement id is correctly set
        assert fail_jobstep.replacement_id == new_jobstep.id
        assert new_jobstep.data['avoid_node'] == 'ip-127-0-0-1'

        # make sure targets are copied over
        assert len(new_jobstep.targets) == 2
        assert set([t.name for t in new_jobstep.targets
                    ]) == set(['//A:test', '//B:test'])

        # we want the replacement jobstep to have the same attributes the
        # original jobstep would be expected to after expand_jobstep()
        assert new_jobstep.data['expanded'] is True
        assert new_jobstep.data['weight'] == 1
        assert new_jobstep.cluster == 'foo'
        # make sure non-whitelisted attributes aren't copied over
        assert 'forceInfraFailure' not in new_jobstep.data

        commands = new_jobstep.commands

        # clone + blacklist-remove + setup + the two user commands
        assert len(commands) == 5

        # command 0: the mocked VCS clone
        idx = 0
        assert commands[idx].script == 'git clone https://example.com'
        assert commands[idx].cwd == ''
        assert commands[idx].type == CommandType.infra_setup
        assert commands[idx].artifacts == []
        assert commands[idx].env == DEFAULT_ENV
        assert commands[idx].order == idx

        # skip blacklist removal command
        idx += 1

        # command 2: the buildstep's configured setup command
        idx += 1
        assert commands[idx].script == 'echo "hello world 2"'
        assert commands[idx].cwd == '/usr/test/1'
        assert commands[idx].type == CommandType.setup
        assert tuple(commands[idx].artifacts) == ('artifact1.txt',
                                                  'artifact2.txt')
        assert commands[idx].env['PATH'] == '/usr/test/1'
        for k, v in DEFAULT_ENV.items():
            if k != 'PATH':
                assert commands[idx].env[k] == v
        assert commands[idx].order == idx

        # command 3: first user command, with buildstep defaults applied
        idx += 1
        assert commands[idx].label == 'echo 1'
        assert commands[idx].script == 'echo 1'
        assert commands[idx].order == idx
        assert commands[idx].cwd == DEFAULT_PATH
        assert commands[idx].type == CommandType.default
        assert tuple(commands[idx].artifacts) == tuple(DEFAULT_ARTIFACTS)
        assert commands[idx].env == DEFAULT_ENV

        # command 4: second user command; label is its first line and the
        # relative path is resolved against the repo checkout
        idx += 1
        assert commands[idx].label == 'echo "foo"'
        assert commands[idx].script == 'echo "foo"\necho "bar"'
        assert commands[idx].order == idx
        assert commands[idx].cwd == './source/subdir'
        assert commands[idx].type == CommandType.default
        assert tuple(commands[idx].artifacts) == tuple(DEFAULT_ARTIFACTS)
        assert commands[idx].env == DEFAULT_ENV
Example #8
0
    def test_create_expanded_jobstep(self, get_vcs):
        """Expanding a FutureJobStep should mark the new jobstep as
        expanded, inherit the buildstep's cluster, create targets for it,
        and build the full command sequence (clone, blacklist removal,
        setup, user commands) in order."""
        build = self.create_build(self.create_project())
        job = self.create_job(build)
        jobphase = self.create_jobphase(job, label='foo')
        jobstep = self.create_jobstep(jobphase)

        new_jobphase = self.create_jobphase(job, label='bar')

        # Stub out VCS access so expansion can build a clone command.
        vcs = mock.Mock(spec=Vcs)
        vcs.get_buildstep_clone.return_value = 'git clone https://example.com'
        get_vcs.return_value = vcs

        future_jobstep = FutureJobStep(
            label='test',
            commands=[
                FutureCommand('echo 1'),
                FutureCommand('echo "foo"\necho "bar"', path='subdir'),
            ],
        )

        buildstep = self.get_buildstep(cluster='foo')
        # Patch target creation so we only verify it is invoked, not its
        # side effects.
        with mock.patch.object(
                buildstep,
                '_create_targets_for_jobstep') as mock_create_targets:
            new_jobstep = buildstep.create_expanded_jobstep(
                jobstep, new_jobphase, future_jobstep)

        mock_create_targets.assert_called_once_with(new_jobstep)

        db.session.flush()

        assert new_jobstep.data['expanded'] is True
        assert new_jobstep.cluster == 'foo'

        commands = new_jobstep.commands

        # clone + blacklist-remove + setup + the two user commands
        assert len(commands) == 5

        # command 0: the mocked VCS clone
        idx = 0
        assert commands[idx].script == 'git clone https://example.com'
        assert commands[idx].cwd == ''
        assert commands[idx].type == CommandType.infra_setup
        assert commands[idx].artifacts == []
        assert commands[idx].env == DEFAULT_ENV
        assert commands[idx].order == idx

        # skip blacklist removal command
        idx += 1

        # command 2: the buildstep's configured setup command
        idx += 1
        assert commands[idx].script == 'echo "hello world 2"'
        assert commands[idx].cwd == '/usr/test/1'
        assert commands[idx].type == CommandType.setup
        assert tuple(commands[idx].artifacts) == ('artifact1.txt',
                                                  'artifact2.txt')
        assert commands[idx].env['PATH'] == '/usr/test/1'
        for k, v in DEFAULT_ENV.items():
            if k != 'PATH':
                assert commands[idx].env[k] == v
        assert commands[idx].order == idx

        # command 3: first user command, with buildstep defaults applied
        idx += 1
        assert commands[idx].label == 'echo 1'
        assert commands[idx].script == 'echo 1'
        assert commands[idx].order == idx
        assert commands[idx].cwd == DEFAULT_PATH
        assert commands[idx].type == CommandType.default
        assert tuple(commands[idx].artifacts) == tuple(DEFAULT_ARTIFACTS)
        assert commands[idx].env == DEFAULT_ENV

        # command 4: second user command; label is its first line and the
        # relative path is resolved against the repo checkout
        idx += 1
        assert commands[idx].label == 'echo "foo"'
        assert commands[idx].script == 'echo "foo"\necho "bar"'
        assert commands[idx].order == idx
        assert commands[idx].cwd == './source/subdir'
        assert commands[idx].type == CommandType.default
        assert tuple(commands[idx].artifacts) == tuple(DEFAULT_ARTIFACTS)
        assert commands[idx].env == DEFAULT_ENV
Example #9
0
    def __init__(self,
                 commands=None,
                 repo_path=None,
                 path=None,
                 env=None,
                 artifacts=DEFAULT_ARTIFACTS,
                 release=DEFAULT_RELEASE,
                 max_executors=10,
                 cpus=DEFAULT_CPUS,
                 memory=DEFAULT_MEMORY_MB,
                 clean=True,
                 debug_config=None,
                 test_stats_from=None,
                 cluster=None,
                 other_repos=None,
                 artifact_search_path=None,
                 use_path_in_artifact_name=False,
                 artifact_suffix=None,
                 **kwargs):
        """
        Constructor for DefaultBuildStep.

        Args:
            commands: list of commands that should be run. Run in the order given. Required.
            repo_path: The path to check out the repo to. Can be relative or absolute.
            path: The default path in which commands will be run. Can be absolute or
                relative to repo_path. If only one of repo_path and path is specified,
                both will be set to the same thing.
            env: dict of environment variables for commands. Defaults to a
                copy of DEFAULT_ENV when not given.
            artifacts: default artifact patterns for commands; stored as
                self.artifacts. Defaults to DEFAULT_ARTIFACTS.
            release: stored as self.release; defaults to DEFAULT_RELEASE.
            max_executors: integer bound stored as self.max_executors;
                presumably caps how many jobsteps an expansion may create —
                see expand() implementations.
            cpus: How many cpus to limit the container to (not applicable for basic)
            memory: How much memory to limit the container to (not applicable for basic)
            clean: controls if the repository should be cleaned before
                tests are run.
                Defaults to true, because False may be unsafe; it may be
                useful to set to False if snapshots are in use and they
                intentionally leave useful incremental build products in the
                repository.
            debug_config: A dictionary of config options for either debugging
                or hacky features. In some cases these are passed through to
                changes-client, in other cases they change some behaviour on
                the server. Supported fields:
                  - infra_failures: this should be a dictionary and is used to
                    force infrastructure failures in builds. The keys of this
                    dictionary refer to the phase (possible values are
                    'primary' and 'expanded'), and the values are the
                    probabilities with which a JobStep in that phase will fail.
                    An example:
                      "debug_config": {"infra_failures": {"primary": 0.5}}
                    This will then cause an infra failure in the primary
                    JobStep with probability 0.5.
                  - prelaunch_script: passed to changes-client
                  - bind_mounts: passed to changes-client
                  - prefer_artifactstore: used in sync_job_step to select
                    artifact source when multiple sources are available
                  - repo_cache_dir: a directory on the build machine containing
                    local caches of repos; if the repository for this build is
                    found in repo_cache_dir, we may clone/pull from it rather
                    than from the normal remote repository. We currently don't
                    do anything to ensure that the cache is up to date;
                    configure e.g. a pre-launch script to do that.
            test_stats_from: project to get test statistics from, or
                None (the default) to use this project.  Useful if the
                project runs a different subset of tests each time, so
                test timing stats from the parent are not reliable.
            cluster: a cluster to associate jobs of this BuildStep with.
                Jobsteps will then only be run on slaves of the given cluster.
            other_repos: A list of dicts, where each dict describes an additional
                repo which should be checked out for the build. Each dict must
                specify a "repo" (either an absolute url or something like
                "foo.git", which will then use a base repo URL, if configured),
                and a "path" to clone the repo to. Default is git, but mercurial
                can be specified with "backend": "hg". Default revision is
                "origin/master" or "default" (for hg), but an explicit revision
                can be specified with "revision".
            artifact_search_path: Absolute path in which test artifacts can be
                found in. This defaults to the value for `path`.
            use_path_in_artifact_name: Tell Changes client to append the hash
                of the file path to the artifact name, before any file extension
                or suffixes.
            artifact_suffix: Tell Changes client to add a suffix to artifacts
                collected. For example, the value ".bazel" will rename
                "test.xml" to "test.bazel.xml". Defaults to the empty string.

        Raises:
            ValueError: if commands is missing, or cpus/memory/max_executors
                are not plain ints.
        """
        if commands is None:
            raise ValueError("Missing required config: need commands")
        # Exact type comparison (not isinstance), so bool and float values
        # are rejected as well.
        if any(
                type(int_field) != int
                for int_field in (cpus, memory, max_executors)):
            raise ValueError(
                "cpus, memory, and max_executors fields must be integers")

        if env is None:
            # Copy so per-instance mutation can't corrupt the shared default.
            env = DEFAULT_ENV.copy()

        # NOTE(review): `artifacts` defaults to the module-level
        # DEFAULT_ARTIFACTS object and is stored directly; if it is a
        # mutable list, instances share it — confirm it is never mutated.
        self.artifacts = artifacts
        self.env = env
        if repo_path:
            self.repo_path = repo_path
            # path is relative to repo_path (unless specified as an absolute path)
            self.path = os.path.join(repo_path, path) if path else repo_path
        else:
            # default repo_path to path if none specified
            self.repo_path = path or DEFAULT_PATH
            self.path = self.repo_path
        # Falsy search paths (None, '') fall back to the command path.
        self.artifact_search_path = artifact_search_path if artifact_search_path else self.path
        self.use_path_in_artifact_name = use_path_in_artifact_name
        self.artifact_suffix = artifact_suffix if artifact_suffix is not None else ""
        self.release = release
        self.max_executors = max_executors
        self.resources = {
            'cpus': cpus,
            'mem': memory,
        }
        self.clean = clean
        self.debug_config = debug_config or {}
        self.test_stats_from = test_stats_from
        self.cluster = cluster
        # Convert raw command dicts into FutureCommands, resolving the
        # optional 'type' string to a CommandType member; copy each dict
        # first so the caller's config is not mutated.
        future_commands = []
        for command in commands:
            command_copy = command.copy()
            if 'type' in command_copy:
                command_copy['type'] = CommandType[command_copy['type']]
            future_command = FutureCommand(**command_copy)
            self._set_command_defaults(future_command)
            future_commands.append(future_command)
        self.commands = future_commands

        self.other_repo_clone_commands = self._other_repo_clone_commands(
            other_repos)

        # this caches the snapshot image database object for a given job id.
        # we use it to avoid performing duplicate queries when
        # get_allocation_command() and get_allocation_params() are called.
        self._jobid2image = {}

        super(DefaultBuildStep, self).__init__(**kwargs)
Example #10
0
    def expand(self, job, max_executors, test_stats_from=None):
        """Shard collected bazel targets into jobsteps.

        Reports affected/total target-count gauges, records BazelTarget
        rows for targets carried over from the parent (selective testing)
        or skipped by exclusion, attaches collector messages to the build,
        then yields one FutureJobStep per shard.
        """
        test_stats, avg_time = self.get_target_stats(test_stats_from
                                                       or self.project.slug)

        affected_targets = self.data['affected_targets']
        unaffected_targets = self.data['unaffected_targets']
        all_targets = affected_targets + unaffected_targets
        statsreporter.stats().set_gauge(
            '{}_bazel_affected_targets_count'.format(self.project.slug),
            len(affected_targets))
        statsreporter.stats().set_gauge(
            '{}_bazel_all_targets_count'.format(self.project.slug),
            len(all_targets))
        to_shard = all_targets

        # NOTE: null values for selective testing policy implies `disabled`
        if job.build.selective_testing_policy is SelectiveTestingPolicy.enabled:
            # Only run affected targets; unaffected ones reuse results
            # from the parent revision.
            to_shard = affected_targets
            for target in unaffected_targets:
                # TODO(naphat) should we check if the target exists in the parent revision?
                # it should be impossible for it not to exist by our collect-targets script
                target_object = BazelTarget(
                    job=job,
                    name=target,
                    result_source=ResultSource.from_parent,
                )
                db.session.add(target_object)

        # Record tag-excluded targets as skipped, with an explanatory
        # message attached to each.
        excluded_targets = self.data.get('excluded_targets')
        if excluded_targets:
            for target in excluded_targets:
                target_object = BazelTarget(
                    job=job,
                    name=target,
                    result=Result.skipped,
                    status=Status.finished,
                )
                db.session.add(target_object)
                target_message = BazelTargetMessage(
                    target=target_object,
                    text='This target was excluded by a tag.')
                db.session.add(target_message)

        # Surface any collector-provided messages on the build itself.
        messages = self.data.get('messages')
        if messages:
            for text in messages:
                message = BuildMessage(build_id=job.build_id, text=text)
                db.session.add(message)

        groups = shard(to_shard, max_executors, target_stats, avg_time)

        for weight, target_list in groups:
            future_command = FutureCommand(
                script=self.data['cmd'].format(
                    target_names=' '.join(target_list)),
                path=self.data.get('path'),
                env=self.data.get('env'),
                artifacts=self.data.get('artifacts'),
            )
            data = {
                'weight': weight,
                'targets': target_list,
                'shard_count': len(groups),
                'artifact_search_path': self.data['artifact_search_path'],
            }
            if 'dependency_map' in self.data:
                data['dependency_map'] = self.data['dependency_map']
            future_jobstep = FutureJobStep(
                label=self.data.get('label') or future_command.label,
                commands=[future_command],
                data=data,
            )
            yield future_jobstep
    def test_simple_expander(self, mock_get_expander,
                             mock_get_build_step_for_job):
        """Finishing a collect_tests command with JSON output should run
        the expander and create a new queued phase with the expanded
        jobsteps; missing output is a 400."""
        project = self.create_project()
        build = self.create_build(project)
        job = self.create_job(build)
        jobphase = self.create_jobphase(job)
        jobstep = self.create_jobstep(jobphase, data={
            'max_executors': 10,
        })
        plan = self.create_plan(project, label='test')
        self.create_step(plan)
        jobplan = self.create_job_plan(job, plan)
        command = self.create_command(jobstep,
                                      type=CommandType.collect_tests,
                                      status=Status.in_progress)

        # Minimal stand-in for BuildStep.create_expanded_jobstep: just
        # materialize the future jobstep into the new phase.
        def dummy_create_expanded_jobstep(jobstep, new_jobphase,
                                          future_jobstep):
            return future_jobstep.as_jobstep(new_jobphase)

        dummy_expander = Mock(spec=Expander)
        dummy_expander.expand.return_value = [
            FutureJobStep(
                label='test',
                commands=[
                    FutureCommand(script='echo 1', ),
                    FutureCommand(script='echo "foo"\necho "bar"', )
                ],
            )
        ]
        dummy_expander.default_phase_name.return_value = 'dummy'
        mock_get_expander.return_value.return_value = dummy_expander
        mock_buildstep = Mock(spec=BuildStep)
        mock_buildstep.create_expanded_jobstep.side_effect = dummy_create_expanded_jobstep

        mock_get_build_step_for_job.return_value = jobplan, mock_buildstep

        path = '/api/0/commands/{0}/'.format(command.id.hex)

        # missing output
        resp = self.client.post(path, data={
            'status': 'finished',
        })
        assert resp.status_code == 400, resp.data

        mock_get_expander.reset_mock()

        # valid params
        resp = self.client.post(path,
                                data={
                                    'status': 'finished',
                                    'output': '{"foo": "bar"}',
                                })
        assert resp.status_code == 200, resp.data

        # The expander is looked up by command type and constructed with
        # the parsed JSON output.
        mock_get_expander.assert_called_once_with(command.type)
        mock_get_expander.return_value.assert_called_once_with(
            project=project,
            data={'foo': 'bar'},
        )
        dummy_expander.validate.assert_called_once_with()
        dummy_expander.expand.assert_called_once_with(
            job=job,
            max_executors=10,
            test_stats_from=mock_buildstep.get_test_stats_from.return_value)

        # A second phase should have been created for the expanded steps.
        phase2 = JobPhase.query.filter(
            JobPhase.job_id == job.id,
            JobPhase.id != jobphase.id,
        ).first()
        assert phase2.label == 'dummy'
        assert phase2.status == Status.queued

        new_jobstep = phase2.current_steps[0]
        assert new_jobstep.label == 'test'