Example #1
0
    def test_get_snapshot_image_given_snapshot(self):
        project = self.create_project()
        plan = self.create_plan(project)
        snapshot = self.create_snapshot(project)
        current_snapshot = self.create_snapshot(project)
        db.session.add(ProjectOption(
            project_id=project.id,
            name='snapshot.current',
            value=current_snapshot.id.hex,
        ))
        snapshot_image = self.create_snapshot_image(snapshot, plan)
        current_snapshot_image = self.create_snapshot_image(current_snapshot, plan)

        assert snapshot_image == SnapshotImage.get(plan, snapshot.id)
        assert current_snapshot_image == SnapshotImage.get(plan, current_snapshot.id)
Example #2
0
    def test_get_snapshot_image_dependent(self):
        project = self.create_project()
        plan_1 = self.create_plan(project)
        plan_2 = self.create_plan(project)
        plan_1.snapshot_plan_id = plan_2.id
        snapshot = self.create_snapshot(project)
        db.session.add(ProjectOption(
            project_id=project.id,
            name='snapshot.current',
            value=snapshot.id.hex,
        ))
        snapshot_image_1 = self.create_snapshot_image(snapshot, plan_1)
        snapshot_image_2 = self.create_snapshot_image(snapshot, plan_2)

        assert snapshot_image_2 == SnapshotImage.get(plan_1, snapshot.id)
        assert snapshot_image_2 == SnapshotImage.get(plan_2, snapshot.id)
Example #3
0
    def build_jobplan(cls, plan, job, snapshot_id=None):
        """Creates and returns a jobplan.

        Unless a snapshot_id is given, no snapshot will be used. This differs
        from the build index endpoint, where the default is the project's
        current snapshot.
        If no snapshot image is found for a plan that is configured to use
        snapshots, a warning is logged.
        """
        from changes.models.option import ItemOption
        from changes.models.snapshot import SnapshotImage

        plan_steps = sorted(plan.steps, key=lambda x: x.order)

        option_item_ids = [s.id for s in plan_steps]
        option_item_ids.append(plan.id)

        options = defaultdict(dict)
        options_query = db.session.query(
            ItemOption.item_id, ItemOption.name, ItemOption.value
        ).filter(
            ItemOption.item_id.in_(option_item_ids),
        )
        for item_id, opt_name, opt_value in options_query:
            options[item_id][opt_name] = opt_value

        snapshot = {
            'steps': [
                HistoricalImmutableStep.from_step(s, options[s.id]).to_json()
                for s in plan_steps
            ],
            'options': options[plan.id],
        }

        snapshot_image_id = None
        # TODO(paulruan): Remove behavior that just having a snapshot plan means
        #                 snapshot use is enabled. Just `snapshot.allow` should be sufficient.
        allow_snapshot = '1' == options[plan.id].get('snapshot.allow', '1') or plan.snapshot_plan
        if allow_snapshot and snapshot_id is not None:
            snapshot_image = SnapshotImage.get(plan, snapshot_id)
            if snapshot_image is not None:
                snapshot_image_id = snapshot_image.id

            if snapshot_image is None:
                logging.warning("Failed to find snapshot_image for %s's %s.",
                                plan.project.slug, plan.label)

        instance = cls(
            plan_id=plan.id,
            job_id=job.id,
            build_id=job.build_id,
            project_id=job.project_id,
            snapshot_image_id=snapshot_image_id,
            data={
                'snapshot': snapshot,
            },
        )

        return instance
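
The eligibility check above (``snapshot.allow`` plus a dependent snapshot plan) is easy to get backwards, so here is a minimal stand-alone sketch of the same decision, assuming plain dict/bool inputs instead of the real ItemOption rows and Plan.snapshot_plan relationship:

# Sketch only: `plan_options` and `has_snapshot_plan` are hypothetical stand-ins
# for the ItemOption values and the Plan.snapshot_plan relationship used above.
def allows_snapshot(plan_options, has_snapshot_plan):
    # snapshot.allow defaults to '1' (enabled) when the option is not set.
    return plan_options.get('snapshot.allow', '1') == '1' or has_snapshot_plan


assert allows_snapshot({}, False)                           # default: allowed
assert not allows_snapshot({'snapshot.allow': '0'}, False)  # explicitly disabled
assert allows_snapshot({'snapshot.allow': '0'}, True)       # dependent snapshot plan wins
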
Example #4
0
    def build_jobplan(cls, plan, job, snapshot_id=None):
        """Creates and returns a jobplan.

        Unless a snapshot_id is given, no snapshot will be used. This differs
        from the build index endpoint, where the default is the project's
        current snapshot.
        If no snapshot image is found for a plan that is configured to use
        snapshots, a warning is logged.
        """
        from changes.models.option import ItemOption
        from changes.models.snapshot import SnapshotImage

        plan_steps = sorted(plan.steps, key=lambda x: x.order)

        option_item_ids = [s.id for s in plan_steps]
        option_item_ids.append(plan.id)

        options = defaultdict(dict)
        options_query = db.session.query(
            ItemOption.item_id, ItemOption.name, ItemOption.value
        ).filter(
            ItemOption.item_id.in_(option_item_ids),
        )
        for item_id, opt_name, opt_value in options_query:
            options[item_id][opt_name] = opt_value

        snapshot = {
            'steps': [
                HistoricalImmutableStep.from_step(s, options[s.id]).to_json()
                for s in plan_steps
            ],
            'options': options[plan.id],
        }

        snapshot_image_id = None
        # TODO(paulruan): Remove behavior that just having a snapshot plan means
        #                 snapshot use is enabled. Just `snapshot.allow` should be sufficient.
        allow_snapshot = '1' == options[plan.id].get('snapshot.allow',
                                                     '1') or plan.snapshot_plan
        if allow_snapshot and snapshot_id is not None:
            snapshot_image = SnapshotImage.get(plan, snapshot_id)
            if snapshot_image is not None:
                snapshot_image_id = snapshot_image.id

            if snapshot_image is None:
                logging.warning("Failed to find snapshot_image for %s's %s.",
                                plan.project.slug, plan.label)

        instance = cls(
            plan_id=plan.id,
            job_id=job.id,
            build_id=job.build_id,
            project_id=job.project_id,
            snapshot_image_id=snapshot_image_id,
            data={
                'snapshot': snapshot,
            },
        )

        return instance
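
For reference, the option aggregation inside ``build_jobplan`` folds flat (item_id, name, value) rows into one dict per item. A self-contained sketch with made-up rows standing in for the ItemOption query result:

from collections import defaultdict

# Hypothetical rows standing in for the (item_id, name, value) tuples returned
# by the ItemOption query above.
rows = [
    ('plan-1', 'snapshot.allow', '1'),
    ('step-1', 'build.timeout', '300'),
    ('step-1', 'build.label', 'tests'),
]

options = defaultdict(dict)
for item_id, opt_name, opt_value in rows:
    options[item_id][opt_name] = opt_value

assert options['step-1'] == {'build.timeout': '300', 'build.label': 'tests'}
assert options['unknown-item'] == {}  # defaultdict yields an empty dict per unseen item
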
Example #5
0
    def test_get_snapshot_image_dependent(self):
        project = self.create_project()
        plan_1 = self.create_plan(project)
        plan_2 = self.create_plan(project)
        plan_1.snapshot_plan_id = plan_2.id
        snapshot = self.create_snapshot(project)
        db.session.add(
            ProjectOption(
                project_id=project.id,
                name='snapshot.current',
                value=snapshot.id.hex,
            ))
        snapshot_image_1 = self.create_snapshot_image(snapshot, plan_1)
        snapshot_image_2 = self.create_snapshot_image(snapshot, plan_2)

        assert snapshot_image_2 == SnapshotImage.get(plan_1, snapshot.id)
        assert snapshot_image_2 == SnapshotImage.get(plan_2, snapshot.id)
Example #6
0
    def test_get_snapshot_image_given_snapshot(self):
        project = self.create_project()
        plan = self.create_plan(project)
        snapshot = self.create_snapshot(project)
        current_snapshot = self.create_snapshot(project)
        db.session.add(
            ProjectOption(
                project_id=project.id,
                name='snapshot.current',
                value=current_snapshot.id.hex,
            ))
        snapshot_image = self.create_snapshot_image(snapshot, plan)
        current_snapshot_image = self.create_snapshot_image(
            current_snapshot, plan)

        assert snapshot_image == SnapshotImage.get(plan, snapshot.id)
        assert current_snapshot_image == SnapshotImage.get(
            plan, current_snapshot.id)
Example #7
0
    def post(self):
        """
        Create a new commit or diff build. The API roughly goes like this:

        1. Identify the project(s) to build for. This can be done by specifying
        ``project``, ``repository``, or ``repository[callsign]``. If a repository is
        specified in either form, all projects for that repository are considered
        for building.

        2. Using the ``sha``, find the appropriate revision object. This may
        involve updating the repo.

        3. If ``patch`` is given, then apply the patch and mark this as a diff build.
        Otherwise, this is a commit build.

        4. If ``snapshot_id`` is given, verify that the snapshot can be used by all
        projects.

        5. If provided, apply ``project_whitelist``, filtering out projects
        not in it.

        6. Based on the flag ``apply_project_files_trigger`` (see comment on the argument
        itself for default values), decide whether or not to filter out projects
        by file blacklist and whitelist.

        7. Attach metadata and create/ensure existence of a build for each project,
        depending on the flag ``ensure_only``.

        NOTE: In ensure-only mode, the collection_ids of the returned builds are
        not necessarily identical, as we give new builds new collection IDs
        and preserve the existing builds' collection IDs.

        NOTE: If ``patch`` is specified, ``sha`` is assumed to be the original
        base revision on which the patch is applied.

        Not relevant until we fix TODO: ``sha`` is **not** guaranteed to be the rev
        used to apply the patch. See ``find_green_parent_sha`` for the logic of
        identifying the correct revision.
        """
        args = self.parser.parse_args()

        if args.patch_file and args.ensure_only:
            return error("Ensure-only mode does not work with a diff build yet.",
                         problems=["patch", "ensure_only"])

        if not (args.project or args.repository or args['repository[phabricator.callsign]']):
            return error("Project or repository must be specified",
                         problems=["project", "repository", "repository[phabricator.callsign]"])

        # read arguments
        if args.patch_data:
            try:
                patch_data = json.loads(args.patch_data)
            except Exception:
                return error("Invalid patch data (must be JSON dict)",
                             problems=["patch[data]"])

            if not isinstance(patch_data, dict):
                return error("Invalid patch data (must be JSON dict)",
                             problems=["patch[data]"])
        else:
            patch_data = None

        # 1. identify project(s)
        projects, repository = try_get_projects_and_repository(args)

        if not projects:
            return error("Unable to find project(s).")

        # read arguments
        label = args.label
        author = args.author
        message = args.message
        tag = args.tag
        snapshot_id = args.snapshot_id
        no_snapshot = args.no_snapshot

        cause = Cause[args.cause]

        if no_snapshot and snapshot_id:
            return error("Cannot specify snapshot with no_snapshot option")

        if not tag and args.patch_file:
            tag = 'patch'

        # 2. validate snapshot
        if snapshot_id:
            snapshot = Snapshot.query.get(snapshot_id)
            if not snapshot:
                return error("Unable to find snapshot.")
            if snapshot.status != SnapshotStatus.active:
                return error("Snapshot is in an invalid state: %s" % snapshot.status)
            for project in projects:
                plans = get_build_plans(project)
                for plan in plans:
                    plan_options = plan.get_item_options()
                    allow_snapshot = '1' == plan_options.get('snapshot.allow', '1') or plan.snapshot_plan
                    if allow_snapshot and not SnapshotImage.get(plan, snapshot_id):
                        # We want to create a build using a specific snapshot but no image
                        # was found for this plan so fail.
                        return error("Snapshot cannot be applied to %s's %s" % (project.slug, plan.label))

        # 3. find revision
        try:
            revision = identify_revision(repository, args.sha)
        except MissingRevision:
            # if the default fails, we absolutely can't continue and the
            # client should send a valid revision
            return error("Unable to find commit %s in %s." % (args.sha, repository.url),
                         problems=['sha', 'repository'])

        # get default values for arguments
        if revision:
            if not author:
                author = revision.author
            if not label:
                label = revision.subject
            # only default the message if it's absolutely not set
            if message is None:
                message = revision.message
            sha = revision.sha
        else:
            sha = args.sha

        if not args.target:
            target = sha[:12]
        else:
            target = args.target[:128]

        if not label:
            if message:
                label = message.splitlines()[0]
            if not label:
                label = 'A homeless build'
        label = label[:128]

        # 4. Check for patch
        if args.patch_file:
            fp = StringIO()
            for line in args.patch_file:
                fp.write(line)
            patch_file = fp
        else:
            patch_file = None

        if patch_file:
            patch = Patch(
                repository=repository,
                parent_revision_sha=sha,
                diff=patch_file.getvalue(),
            )
            db.session.add(patch)
        else:
            patch = None

        project_options = ProjectOptionsHelper.get_options(projects, ['build.file-whitelist'])

        # mark as commit or diff build
        if not patch:
            is_commit_build = True
        else:
            is_commit_build = False

        apply_project_files_trigger = args.apply_project_files_trigger
        if apply_project_files_trigger is None:
            apply_project_files_trigger = args.apply_file_whitelist
        if apply_project_files_trigger is None:
            if is_commit_build:
                apply_project_files_trigger = False
            else:
                apply_project_files_trigger = True

        if apply_project_files_trigger:
            if patch:
                diff_parser = DiffParser(patch.diff)
                files_changed = diff_parser.get_changed_files()
            elif revision:
                try:
                    files_changed = _get_revision_changed_files(repository, revision)
                except MissingRevision:
                    return error("Unable to find commit %s in %s." % (args.sha, repository.url),
                                 problems=['sha', 'repository'])
            else:
                # the only way that revision can be null is if this repo does not have a vcs backend
                logging.warning('Revision and patch are both None for sha %s. This is because the repo %s does not have a VCS backend.', sha, repository.url)
                files_changed = None
        else:
            # we won't be applying file whitelist, so there is no need to get the list of changed files.
            files_changed = None

        collection_id = uuid.uuid4()

        builds = []
        for project in projects:
            plan_list = get_build_plans(project)
            if not plan_list:
                logging.warning('No plans defined for project %s', project.slug)
                continue
            # 5. apply project whitelist as appropriate
            if args.project_whitelist is not None and project.slug not in args.project_whitelist:
                logging.info('Project %s is not in the supplied whitelist', project.slug)
                continue
            forced_sha = sha
            # TODO(dcramer): find_green_parent_sha needs to take branch
            # into account
            # if patch_file:
            #     forced_sha = find_green_parent_sha(
            #         project=project,
            #         sha=sha,
            #     )

            # 6. apply file whitelist as appropriate
            diff = None
            if patch is not None:
                diff = patch.diff
            if (
                apply_project_files_trigger and
                files_changed is not None and
                not files_changed_should_trigger_project(
                    files_changed, project, project_options[project.id], sha, diff)
            ):
                logging.info('Changed files do not trigger build for project %s', project.slug)
                continue
            # 7. create/ensure build
            build_message = None
            selective_testing_policy = SelectiveTestingPolicy.disabled
            if args.selective_testing and project_lib.contains_active_autogenerated_plan(project):
                if is_commit_build:
                    selective_testing_policy, reasons = get_selective_testing_policy(project, sha, diff)
                    if reasons:
                        if selective_testing_policy is SelectiveTestingPolicy.disabled:
                            reasons = ["Selective testing was requested but not done because:"] + ['    ' + m for m in reasons]
                        build_message = '\n'.join(reasons)
                else:
                    # NOTE: for diff builds, it makes sense to just do selective testing,
                    # since it will never become a parent build and will never be used to
                    # calculate revision results.
                    selective_testing_policy = SelectiveTestingPolicy.enabled
            if args.ensure_only:
                potentials = list(Build.query.filter(
                    Build.project_id == project.id,
                    Build.source.has(revision_sha=sha, patch=patch),
                ).order_by(
                    Build.date_created.desc()  # newest first
                ).limit(1))
                if len(potentials) == 0:
                    builds.append(create_build(
                        project=project,
                        collection_id=collection_id,
                        sha=forced_sha,
                        target=target,
                        label=label,
                        message=message,
                        author=author,
                        patch=patch,
                        source_data=patch_data,
                        tag=tag,
                        cause=cause,
                        snapshot_id=snapshot_id,
                        no_snapshot=no_snapshot,
                        selective_testing_policy=selective_testing_policy,
                    ))
                else:
                    builds.append(potentials[0])
            else:
                builds.append(create_build(
                    project=project,
                    collection_id=collection_id,
                    sha=forced_sha,
                    target=target,
                    label=label,
                    message=message,
                    author=author,
                    patch=patch,
                    source_data=patch_data,
                    tag=tag,
                    cause=cause,
                    snapshot_id=snapshot_id,
                    no_snapshot=no_snapshot,
                    selective_testing_policy=selective_testing_policy,
                ))

            if build_message:
                # Use a distinct name so the commit message above is not shadowed.
                message_obj = BuildMessage(
                    build=builds[-1],
                    text=build_message,
                )
                db.session.add(message_obj)
                db.session.commit()

        return self.respond(builds)
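
As a rough illustration of how a client might drive this endpoint, the sketch below posts the arguments parsed above. The host and the /api/0/builds/ path are assumptions; only the field names (``project``, ``sha``, ``snapshot_id``, ``ensure_only``) come from the argument parsing in post().

import requests

# Hypothetical client call; the URL and all values are placeholders.
resp = requests.post('https://changes.example.com/api/0/builds/', data={
    'project': 'my-project',         # or 'repository' / 'repository[phabricator.callsign]'
    'sha': 'a1b2c3d4e5f6',           # revision to build
    'snapshot_id': 'snapshot-uuid',  # optional: pin a specific snapshot
    'ensure_only': '1',              # truthy flag; exact encoding depends on the parser
})
resp.raise_for_status()
builds = resp.json()
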
Example #8
0
def execute_build(build, snapshot_id, no_snapshot):
    if no_snapshot:
        assert snapshot_id is None, 'Cannot specify snapshot with no_snapshot option'
    # TODO(dcramer): most of this should be abstracted into sync_build as if it
    # were an "I'm on step 0, create step 1" operation
    project = build.project

    # We choose a snapshot before creating jobplans. This is so that different
    # jobplans won't end up using different snapshots in a build.
    if snapshot_id is None and not no_snapshot:
        snapshot = Snapshot.get_current(project.id)
        if snapshot:
            snapshot_id = snapshot.id

    plans = get_build_plans(project)

    options = ItemOptionsHelper.get_options([p.id for p in plans], ['snapshot.require'])

    jobs = []
    for plan in get_build_plans(project):
        if (options[plan.id].get('snapshot.require', '0') == '1' and
                not no_snapshot and
                SnapshotImage.get(plan, snapshot_id) is None):
            logging.warning('Skipping plan %r (%r) because no snapshot exists yet', plan.label, project.slug)
            continue

        job = Job(
            build=build,
            build_id=build.id,
            project=project,
            project_id=project.id,
            source=build.source,
            source_id=build.source_id,
            status=build.status,
            label=plan.label,
        )

        db.session.add(job)

        jobplan = JobPlan.build_jobplan(plan, job, snapshot_id=snapshot_id)

        db.session.add(jobplan)

        jobs.append(job)

    db.session.commit()

    for job in jobs:
        create_job.delay(
            job_id=job.id.hex,
            task_id=job.id.hex,
            parent_task_id=job.build_id.hex,
        )

    db.session.commit()

    sync_build.delay(
        build_id=build.id.hex,
        task_id=build.id.hex,
    )

    return build
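
The snapshot selection above has a simple precedence: an explicit snapshot_id wins, otherwise the project's current snapshot is used, and no_snapshot disables both. A minimal sketch of that rule, with a stubbed lookup in place of Snapshot.get_current:

# Sketch only: `get_current_snapshot_id` is a hypothetical stand-in for
# Snapshot.get_current(project.id).
def choose_snapshot_id(explicit_id, no_snapshot, get_current_snapshot_id):
    if no_snapshot:
        assert explicit_id is None, 'Cannot specify snapshot with no_snapshot option'
        return None
    if explicit_id is not None:
        return explicit_id
    return get_current_snapshot_id()  # may be None if the project has no snapshot yet


assert choose_snapshot_id('snap-1', False, lambda: 'current') == 'snap-1'
assert choose_snapshot_id(None, False, lambda: 'current') == 'current'
assert choose_snapshot_id(None, True, lambda: 'current') is None
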
Example #9
0
    def create_snapshot_image(self, snapshot, plan, **kwargs):
        image = SnapshotImage(snapshot=snapshot, plan=plan, **kwargs)
        db.session.add(image)
        db.session.commit()
        return image
Example #10
0
    def post(self):
        """
        Create a new commit or diff build. The API roughly goes like this:

        1. Identify the project(s) to build for. This can be done by specifying
        ``project``, ``repository``, or ``repository[callsign]``. If a repository is
        specified in either form, all projects for that repository are considered
        for building.

        2. Using the ``sha``, find the appropriate revision object. This may
        involve updating the repo.

        3. If ``patch`` is given, then apply the patch and mark this as a diff build.
        Otherwise, this is a commit build.

        4. If ``snapshot_id`` is given, verify that the snapshot can be used by all
        projects.

        5. If provided, apply ``project_whitelist``, filtering out projects
        not in it.

        6. Based on the flag ``apply_project_files_trigger`` (see comment on the argument
        itself for default values), decide whether or not to filter out projects
        by file blacklist and whitelist.

        7. Attach metadata and create/ensure existence of a build for each project,
        depending on the flag ``ensure_only``.

        NOTE: In ensure-only mode, the collection_ids of the returned builds are
        not necessarily identical, as we give new builds new collection IDs
        and preserve the existing builds' collection IDs.

        NOTE: If ``patch`` is specified, ``sha`` is assumed to be the original
        base revision on which the patch is applied.

        Not relevant until we fix TODO: ``sha`` is **not** guaranteed to be the rev
        used to apply the patch. See ``find_green_parent_sha`` for the logic of
        identifying the correct revision.
        """
        args = self.parser.parse_args()

        if args.patch_file and args.ensure_only:
            return error(
                "Ensure-only mode does not work with a diff build yet.",
                problems=["patch", "ensure_only"])

        if not (args.project or args.repository
                or args['repository[phabricator.callsign]']):
            return error("Project or repository must be specified",
                         problems=[
                             "project", "repository",
                             "repository[phabricator.callsign]"
                         ])

        # read arguments
        if args.patch_data:
            try:
                patch_data = json.loads(args.patch_data)
            except Exception:
                return error("Invalid patch data (must be JSON dict)",
                             problems=["patch[data]"])

            if not isinstance(patch_data, dict):
                return error("Invalid patch data (must be JSON dict)",
                             problems=["patch[data]"])
        else:
            patch_data = None

        # 1. identify project(s)
        projects, repository = try_get_projects_and_repository(args)

        if not projects:
            return error("Unable to find project(s).")

        # read arguments
        label = args.label
        author = args.author
        message = args.message
        tag = args.tag
        snapshot_id = args.snapshot_id
        no_snapshot = args.no_snapshot

        cause = Cause[args.cause]

        if no_snapshot and snapshot_id:
            return error("Cannot specify snapshot with no_snapshot option")

        if not tag and args.patch_file:
            tag = 'patch'

        # 2. validate snapshot
        if snapshot_id:
            snapshot = Snapshot.query.get(snapshot_id)
            if not snapshot:
                return error("Unable to find snapshot.")
            if snapshot.status != SnapshotStatus.active:
                return error("Snapshot is in an invalid state: %s" %
                             snapshot.status)
            for project in projects:
                plans = get_build_plans(project)
                for plan in plans:
                    plan_options = plan.get_item_options()
                    allow_snapshot = '1' == plan_options.get(
                        'snapshot.allow', '1') or plan.snapshot_plan
                    if allow_snapshot and not SnapshotImage.get(
                            plan, snapshot_id):
                        # We want to create a build using a specific snapshot but no image
                        # was found for this plan so fail.
                        return error("Snapshot cannot be applied to %s's %s" %
                                     (project.slug, plan.label))

        # 3. find revision
        try:
            revision = identify_revision(repository, args.sha)
        except MissingRevision:
            # if the default fails, we absolutely can't continue and the
            # client should send a valid revision
            return error("Unable to find commit %s in %s." %
                         (args.sha, repository.url),
                         problems=['sha', 'repository'])

        # get default values for arguments
        if revision:
            if not author:
                author = revision.author
            if not label:
                label = revision.subject
            # only default the message if it's absolutely not set
            if message is None:
                message = revision.message
            sha = revision.sha
        else:
            sha = args.sha

        if not args.target:
            target = sha[:12]
        else:
            target = args.target[:128]

        if not label:
            if message:
                label = message.splitlines()[0]
            if not label:
                label = 'A homeless build'
        label = label[:128]

        # 4. Check for patch
        if args.patch_file:
            fp = StringIO()
            for line in args.patch_file:
                fp.write(line)
            patch_file = fp
        else:
            patch_file = None

        if patch_file:
            patch = Patch(
                repository=repository,
                parent_revision_sha=sha,
                diff=patch_file.getvalue(),
            )
            db.session.add(patch)
        else:
            patch = None

        project_options = ProjectOptionsHelper.get_options(
            projects, ['build.file-whitelist'])

        # mark as commit or diff build
        if not patch:
            is_commit_build = True
        else:
            is_commit_build = False

        apply_project_files_trigger = args.apply_project_files_trigger
        if apply_project_files_trigger is None:
            apply_project_files_trigger = args.apply_file_whitelist
        if apply_project_files_trigger is None:
            if is_commit_build:
                apply_project_files_trigger = False
            else:
                apply_project_files_trigger = True

        if apply_project_files_trigger:
            if patch:
                diff_parser = DiffParser(patch.diff)
                files_changed = diff_parser.get_changed_files()
            elif revision:
                try:
                    files_changed = _get_revision_changed_files(
                        repository, revision)
                except MissingRevision:
                    return error("Unable to find commit %s in %s." %
                                 (args.sha, repository.url),
                                 problems=['sha', 'repository'])
            else:
                # the only way that revision can be null is if this repo does not have a vcs backend
                logging.warning(
                    'Revision and patch are both None for sha %s. This is because the repo %s does not have a VCS backend.',
                    sha, repository.url)
                files_changed = None
        else:
            # we won't be applying file whitelist, so there is no need to get the list of changed files.
            files_changed = None

        collection_id = uuid.uuid4()

        builds = []
        for project in projects:
            plan_list = get_build_plans(project)
            if not plan_list:
                logging.warning('No plans defined for project %s',
                                project.slug)
                continue
            # 5. apply project whitelist as appropriate
            if args.project_whitelist is not None and project.slug not in args.project_whitelist:
                logging.info('Project %s is not in the supplied whitelist',
                             project.slug)
                continue
            forced_sha = sha
            # TODO(dcramer): find_green_parent_sha needs to take branch
            # into account
            # if patch_file:
            #     forced_sha = find_green_parent_sha(
            #         project=project,
            #         sha=sha,
            #     )

            # 6. apply file whitelist as appropriate
            diff = None
            if patch is not None:
                diff = patch.diff
            if (apply_project_files_trigger and files_changed is not None
                    and not files_changed_should_trigger_project(
                        files_changed, project, project_options[project.id],
                        sha, diff)):
                logging.info(
                    'Changed files do not trigger build for project %s',
                    project.slug)
                continue
            # 7. create/ensure build
            build_message = None
            selective_testing_policy = SelectiveTestingPolicy.disabled
            if args.selective_testing and project_lib.contains_active_autogenerated_plan(
                    project):
                if is_commit_build:
                    selective_testing_policy, reasons = get_selective_testing_policy(
                        project, sha, diff)
                    if reasons:
                        if selective_testing_policy is SelectiveTestingPolicy.disabled:
                            reasons = [
                                "Selective testing was requested but not done because:"
                            ] + ['    ' + m for m in reasons]
                        build_message = '\n'.join(reasons)
                else:
                    # NOTE: for diff builds, it makes sense to just do selective testing,
                    # since it will never become a parent build and will never be used to
                    # calculate revision results.
                    selective_testing_policy = SelectiveTestingPolicy.enabled
            if args.ensure_only:
                potentials = list(
                    Build.query.filter(
                        Build.project_id == project.id,
                        Build.source.has(revision_sha=sha, patch=patch),
                    ).order_by(
                        Build.date_created.desc()  # newest first
                    ).limit(1))
                if len(potentials) == 0:
                    builds.append(
                        create_build(
                            project=project,
                            collection_id=collection_id,
                            sha=forced_sha,
                            target=target,
                            label=label,
                            message=message,
                            author=author,
                            patch=patch,
                            source_data=patch_data,
                            tag=tag,
                            cause=cause,
                            snapshot_id=snapshot_id,
                            no_snapshot=no_snapshot,
                            selective_testing_policy=selective_testing_policy,
                        ))
                else:
                    builds.append(potentials[0])
            else:
                builds.append(
                    create_build(
                        project=project,
                        collection_id=collection_id,
                        sha=forced_sha,
                        target=target,
                        label=label,
                        message=message,
                        author=author,
                        patch=patch,
                        source_data=patch_data,
                        tag=tag,
                        cause=cause,
                        snapshot_id=snapshot_id,
                        no_snapshot=no_snapshot,
                        selective_testing_policy=selective_testing_policy,
                    ))

            if build_message:
                # Use a distinct name so the commit message above is not shadowed.
                message_obj = BuildMessage(
                    build=builds[-1],
                    text=build_message,
                )
                db.session.add(message_obj)
                db.session.commit()

        return self.respond(builds)
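
The ``apply_project_files_trigger`` flag above falls back to the legacy ``apply_file_whitelist`` argument and then to a default that depends on the build type. A stand-alone sketch of that fallback chain, with plain values in place of the parsed request args:

# Sketch only: mirrors the defaulting logic in post(); plain booleans replace the args.
def resolve_files_trigger(apply_project_files_trigger, apply_file_whitelist, is_commit_build):
    if apply_project_files_trigger is None:
        apply_project_files_trigger = apply_file_whitelist  # legacy flag
    if apply_project_files_trigger is None:
        # Default: diff builds filter by changed files, commit builds do not.
        apply_project_files_trigger = not is_commit_build
    return apply_project_files_trigger


assert resolve_files_trigger(None, None, is_commit_build=True) is False
assert resolve_files_trigger(None, None, is_commit_build=False) is True
assert resolve_files_trigger(True, None, is_commit_build=True) is True
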
Example #11
0
def execute_build(build, snapshot_id, no_snapshot):
    if no_snapshot:
        assert snapshot_id is None, 'Cannot specify snapshot with no_snapshot option'
    # TODO(dcramer): most of this should be abstracted into sync_build as if it
    # were an "I'm on step 0, create step 1" operation
    project = build.project

    # We choose a snapshot before creating jobplans. This is so that different
    # jobplans won't end up using different snapshots in a build.
    if snapshot_id is None and not no_snapshot:
        snapshot = Snapshot.get_current(project.id)
        if snapshot:
            snapshot_id = snapshot.id

    plans = get_build_plans(project)

    options = ItemOptionsHelper.get_options([p.id for p in plans],
                                            ['snapshot.require'])

    jobs = []
    for plan in get_build_plans(project):
        if (options[plan.id].get('snapshot.require', '0') == '1'
                and not no_snapshot
                and SnapshotImage.get(plan, snapshot_id) is None):
            logging.warning(
                'Skipping plan %r (%r) because no snapshot exists yet',
                plan.label, project.slug)
            continue

        job = Job(
            build=build,
            build_id=build.id,
            project=project,
            project_id=project.id,
            source=build.source,
            source_id=build.source_id,
            status=build.status,
            label=plan.label,
        )

        db.session.add(job)

        jobplan = JobPlan.build_jobplan(plan, job, snapshot_id=snapshot_id)

        db.session.add(jobplan)

        jobs.append(job)

    db.session.commit()

    for job in jobs:
        create_job.delay(
            job_id=job.id.hex,
            task_id=job.id.hex,
            parent_task_id=job.build_id.hex,
        )

    db.session.commit()

    sync_build.delay(
        build_id=build.id.hex,
        task_id=build.id.hex,
    )

    return build
Example #12
0
    def post(self, project_id):
        """Initiates a new snapshot for this project."""
        project = Project.get(project_id)
        if not project:
            return '', 404

        args = self.post_parser.parse_args()

        repository = project.repository

        try:
            revision = identify_revision(repository, args.sha)
        except MissingRevision:
            # if the default fails, we absolutely can't continue and the
            # client should send a valid revision
            return error("Unable to find a matching revision.")

        if revision:
            sha = revision.sha
        else:
            sha = args.sha

        plan_list = get_snapshottable_plans(project)

        if not plan_list:
            return error("No snapshottable plans associated with project.")

        source, _ = get_or_create(Source,
                                  where={
                                      'repository': repository,
                                      'revision_sha': sha,
                                      'patch_id': None,
                                  })

        build = Build(
            source_id=source.id,
            source=source,
            project_id=project.id,
            project=project,
            label='Create Snapshot',
            status=Status.queued,
            cause=Cause.snapshot,
            target=sha[:12],
            tags=['snapshot'],
            # Snapshot builds are often part of the solution to queueing, so we make them
            # high priority to schedule them sooner.
            priority=BuildPriority.high,
        )
        db.session.add(build)

        # TODO(dcramer): this needs to update with the build result
        snapshot = Snapshot(
            project_id=project.id,
            source_id=source.id,
            build_id=build.id,
            status=SnapshotStatus.pending,
        )
        db.session.add(snapshot)

        jobs = []
        for plan in plan_list:
            job = Job(
                build=build,
                build_id=build.id,
                project=project,
                project_id=project.id,
                source=build.source,
                source_id=build.source_id,
                status=build.status,
                label='Create Snapshot: %s' % (plan.label, ),
            )
            db.session.add(job)

            jobplan = JobPlan.build_jobplan(plan, job)
            db.session.add(jobplan)

            image = SnapshotImage(
                job=job,
                snapshot=snapshot,
                plan=plan,
            )
            db.session.add(image)

            jobs.append(job)

        db.session.commit()

        for job in jobs:
            create_job.delay(
                job_id=job.id.hex,
                task_id=job.id.hex,
                parent_task_id=job.build_id.hex,
            )

        db.session.commit()

        sync_build.delay(
            build_id=build.id.hex,
            task_id=build.id.hex,
        )

        return self.respond(snapshot)