def test_with_timed_skips(self):
        # No builds yet
        with mock.patch('changes.models.project.Project.get_config') as mocked:
            mocked.return_value = {
                'build.file-blacklist': [],
                'build.minimum-minutes-between-builds': 30,
            }
            assert files_changed_should_trigger_project(
                ['a', 'b'],
                self.project,
                {},
                self.revision.sha,
            )
            (sha, diff, _), _ = mocked.call_args
            assert sha == self.revision.sha
            assert diff is None

        # Stale build
        self.create_build(self.project,
                          date_created=datetime.now() - timedelta(minutes=31))

        with mock.patch('changes.models.project.Project.get_config') as mocked:
            mocked.return_value = {
                'build.file-blacklist': [],
                'build.minimum-minutes-between-builds': 30,
            }
            assert files_changed_should_trigger_project(
                ['a', 'b'],
                self.project,
                {},
                self.revision.sha,
            )
            (sha, diff, _), _ = mocked.call_args
            assert sha == self.revision.sha
            assert diff is None

        # Create recent build
        self.create_build(self.project,
                          date_created=datetime.now() - timedelta(minutes=29))

        # Should fail because of recently created build.
        with mock.patch('changes.models.project.Project.get_config') as mocked:
            mocked.return_value = {
                'build.file-blacklist': [],
                'build.minimum-minutes-between-builds': 30,
            }
            assert not files_changed_should_trigger_project(
                ['a', 'b'],
                self.project,
                {},
                self.revision.sha,
            )
            (sha, diff, _), _ = mocked.call_args
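Taken together, the tests on this page pin down the behaviour of files_changed_should_trigger_project: touching the project's in-repo config always triggers, the build.file-whitelist option is matched against the changed files, the blacklist and build.minimum-minutes-between-builds come from Project.get_config(sha, diff, ...), and when that call raises ProjectConfigError the whitelist decision alone is used. The sketch below is only an illustration consistent with these tests, not the actual Changes implementation; the Build query, the fnmatch-style matching, and the import paths are assumptions.

from datetime import datetime, timedelta
from fnmatch import fnmatch

from changes.models import Build                         # assumed import path
from changes.models.project import ProjectConfigError    # assumed import path


def files_changed_should_trigger_project(files_changed, project, options, sha, diff=None):
    # A change to the project's own config file always triggers a build.
    config_path = project.get_config_path()
    if config_path in files_changed:
        return True

    # build.file-whitelist lives in the project *options*; an empty or missing
    # whitelist matches everything (see test_whitelist_empty below).
    patterns = [p for p in options.get('build.file-whitelist', '').splitlines() if p]
    whitelist_matched = not patterns or any(
        fnmatch(path, pattern) for path in files_changed for pattern in patterns)

    # The blacklist and the build-frequency limit come from the in-repo config.
    try:
        config = project.get_config(sha, diff, config_path)
    except ProjectConfigError:
        # Invalid yaml: fall back to the whitelist decision only, so an unmatched
        # whitelist still returns False (test_yaml_invalid_whitelist_unmatched).
        return whitelist_matched

    if not whitelist_matched:
        return False

    blacklist = config.get('build.file-blacklist', [])
    if all(any(fnmatch(f, pat) for pat in blacklist) for f in files_changed):
        return False  # every changed file is blacklisted

    # Skip the build if another one was created too recently (test_with_timed_skips).
    min_minutes = config.get('build.minimum-minutes-between-builds', 0)
    if min_minutes:
        latest = Build.query.filter(
            Build.project_id == project.id,
        ).order_by(Build.date_created.desc()).first()
        if latest and latest.date_created > datetime.now() - timedelta(minutes=min_minutes):
            return False
    return True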
Example #3
 def test_config_changed(self):
     assert files_changed_should_trigger_project(
         [self.project.get_config_path()],
         self.project,
         {},
         self.revision.sha,
     )
Example #5
    def run(self):
        revision = self.revision

        project_list = self.get_project_list()
        if not project_list:
            return

        options = ProjectOptionsHelper.get_options(project_list, [
            'build.branch-names',
            'build.commit-trigger',
            'build.file-whitelist',
        ])

        files_changed = self.get_changed_files()

        projects_to_build = []
        for project in project_list:
            if options[project.id].get('build.commit-trigger', '1') != '1':
                self.logger.info('build.commit-trigger is disabled for project %s', project.slug)
                continue

            branch_names = filter(bool, options[project.id].get('build.branch-names', '*').split(' '))
            if not revision.should_build_branch(branch_names):
                self.logger.info('No branches matched build.branch-names for project %s', project.slug)
                continue

            try:
                if not files_changed_should_trigger_project(files_changed, project, options[project.id], revision.sha):
                    self.logger.info('No changed files matched project trigger for project %s', project.slug)
                    continue
            except ProjectConfigError:
                author_name = '(unknown)'
                if revision.author_id:
                    author_name = revision.author.name
                self.logger.error('Project config for project %s is not in a valid format. Author is %s.', project.slug, author_name, exc_info=True)

            projects_to_build.append(project.slug)

        for project_slug in projects_to_build:
            data = {
                'sha': revision.sha,
                'project': project_slug,
                'tag': 'commit',
            }
            with current_app.test_request_context('/api/0/builds/', method='POST', data=data):
                try:
                    response = BuildIndexAPIView().post()
                except Exception as e:
                    self.logger.exception('Failed to create build: %s' % (e,))
                else:
                    if isinstance(response, (list, tuple)):
                        response, status = response
                        if status != 200:
                            self.logger.error('Failed to create build: %s' % (response,), extra={
                                'data': data,
                            })
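run() above splits build.branch-names on spaces into glob patterns and then asks the revision whether any of its branches match. A minimal sketch of that kind of matching, assuming fnmatch-style globs (the real Revision.should_build_branch may differ, for example in how it treats revisions with no branch information):

from fnmatch import fnmatch


def should_build_branch(revision_branches, allowed_patterns):
    # With no patterns configured (or no branch data on the revision),
    # conservatively allow the build.
    if not allowed_patterns or not revision_branches:
        return True
    return any(
        fnmatch(branch, pattern)
        for branch in revision_branches
        for pattern in allowed_patterns
    )


# should_build_branch(['master'], ['master', 'release-*'])  -> True
# should_build_branch(['feature/x'], ['master'])            -> False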
 def test_whitelist_empty(self):
     with mock.patch('changes.models.project.Project.get_config') as mocked:
         mocked.return_value = {'build.file-blacklist': []}
         assert files_changed_should_trigger_project(
             ['a', 'b'],
             self.project,
             {},
             self.revision.sha,
         )
         (sha, diff, _), _ = mocked.call_args
         assert sha == self.revision.sha
         assert diff is None
Example #7
 def test_whitelist_empty(self):
     with mock.patch('changes.models.Project.get_config') as mocked:
         mocked.return_value = {
             'build.file-blacklist': []
         }
         assert files_changed_should_trigger_project(
             ['a', 'b'],
             self.project,
             {},
             self.revision.sha,
         )
         (sha, diff, _), _ = mocked.call_args
         assert sha == self.revision.sha
         assert diff is None
Example #8
 def _get_projects_for_diff(self, diff, files_changed):
     projects = list(Project.query.options(
         subqueryload_all('plans'),
     ).filter(
         Project.status == ProjectStatus.active,
         Project.repository_id == diff.source.repository_id,
     ))
     project_options = ProjectOptionsHelper.get_options(projects, ['build.file-whitelist', 'phabricator.diff-trigger'])
     projects = [
         x for x in projects
         if get_build_plans(x) and
         project_options[x.id].get('phabricator.diff-trigger', '1') == '1' and
         files_changed_should_trigger_project(files_changed, x, project_options[x.id], diff.source.revision_sha, diff=diff.source.patch.diff)
     ]
     return projects
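Several examples here index the result of ProjectOptionsHelper.get_options by project id and then .get() an option name with a string default, so the helper presumably returns a mapping of project id to a dict of option-name/value strings. A hypothetical illustration of that shape (values invented):

import uuid

some_project_id = uuid.uuid4()   # stands in for a real Project.id
project_options = {
    some_project_id: {
        'build.file-whitelist': 'src/*\ndocs/conf.py',   # newline-separated globs
        'phabricator.diff-trigger': '1',                 # option values are strings
    },
}
assert project_options[some_project_id].get('phabricator.diff-trigger', '1') == '1'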
 def test_with_no_skips(self):
     with mock.patch('changes.models.project.Project.get_config') as mocked:
         mocked.return_value = {
             'build.file-blacklist': [],
             'build.minimum-minutes-between-builds': 0,
         }
         assert files_changed_should_trigger_project(
             ['a', 'b'],
             self.project,
             {},
             self.revision.sha,
         )
         (sha, diff, _), _ = mocked.call_args
         assert sha == self.revision.sha
         assert diff is None
    def test_yaml_invalid_whitelist_matched(self):
        with mock.patch('changes.models.project.Project.get_config') as mocked:
            mocked.side_effect = ProjectConfigError
            # this time the whitelist *does* match
            assert files_changed_should_trigger_project(
                ['a', 'b', 'y/a.txt'],
                self.project,
                {'build.file-whitelist': """
x
y/a.txt
z
"""},
                self.revision.sha,
            )
            (sha, diff, _), _ = mocked.call_args
            assert sha == self.revision.sha
            assert diff is None
Example #13
    def test_yaml_invalid_whitelist_unmatched(self):
        with mock.patch('changes.models.project.Project.get_config') as mocked:
            mocked.side_effect = ProjectConfigError
            # Should still return False even though blacklist/yaml config is invalid.
            # This avoids potentially disastrous results where all whitelists are ignored
            # upon encountering an invalid yaml file.
            assert not files_changed_should_trigger_project(
                ['a', 'b'],
                self.project,
                {'build.file-whitelist': """
x
y/a.txt
z
"""},
                self.revision.sha,
            )
            (sha, diff, _), _ = mocked.call_args
            assert sha == self.revision.sha
            assert diff is None
Example #14
 def _get_projects_for_diff(self, diff, files_changed):
     projects = list(
         Project.query.options(subqueryload_all('plans'), ).filter(
             Project.status == ProjectStatus.active,
             Project.repository_id == diff.source.repository_id,
         ))
     project_options = ProjectOptionsHelper.get_options(
         projects, ['build.file-whitelist', 'phabricator.diff-trigger'])
     projects = [
         x for x in projects
         if get_build_plans(x) and project_options[x.id].get(
             'phabricator.diff-trigger', '1') == '1' and
         files_changed_should_trigger_project(files_changed,
                                              x,
                                              project_options[x.id],
                                              diff.source.revision_sha,
                                              diff=diff.source.patch.diff)
     ]
     return projects
Example #16
    def post(self):
        """
        Create a new commit or diff build. The API roughly goes like this:

        1. Identify the project(s) to build for. This can be done by specifying
        ``project``, ``repository``, or ``repository[callsign]``. If a repository is
        specified somehow, then all projects for that repository are considered
        for building.

        2. Using the ``sha``, find the appropriate revision object. This may
        involve updating the repo.

        3. If ``patch`` is given, then apply the patch and mark this as a diff build.
        Otherwise, this is a commit build.

        4. If ``snapshot_id`` is given, verify that the snapshot can be used by all
        projects.

        5. If provided, apply project_whitelist, filtering out projects not in
        this whitelist.

        6. Based on the flag ``apply_project_files_trigger`` (see comment on the argument
        itself for default values), decide whether or not to filter out projects
        by file blacklist and whitelist.

        7. Attach metadata and create/ensure existence of a build for each project,
        depending on the flag ``ensure_only``.

        NOTE: In ensure-only mode, the collection_ids of the returned builds are
        not necessarily identical, as we give new builds new collection IDs
        and preserve the existing builds' collection IDs.

        NOTE: If ``patch`` is specified ``sha`` is assumed to be the original
        base revision to apply the patch.

        Not relevant until we fix TODO: ``sha`` is **not** guaranteed to be the rev
        used to apply the patch. See ``find_green_parent_sha`` for the logic of
        identifying the correct revision.
        """
        args = self.parser.parse_args()

        if args.patch_file and args.ensure_only:
            return error("Ensure-only mode does not work with a diff build yet.",
                         problems=["patch", "ensure_only"])

        if not (args.project or args.repository or args['repository[phabricator.callsign]']):
            return error("Project or repository must be specified",
                         problems=["project", "repository", "repository[phabricator.callsign]"])

        # read arguments
        if args.patch_data:
            try:
                patch_data = json.loads(args.patch_data)
            except Exception:
                return error("Invalid patch data (must be JSON dict)",
                             problems=["patch[data]"])

            if not isinstance(patch_data, dict):
                return error("Invalid patch data (must be JSON dict)",
                             problems=["patch[data]"])
        else:
            patch_data = None

        # 1. identify project(s)
        projects, repository = try_get_projects_and_repository(args)

        if not projects:
            return error("Unable to find project(s).")

        # read arguments
        label = args.label
        author = args.author
        message = args.message
        tag = args.tag
        snapshot_id = args.snapshot_id
        no_snapshot = args.no_snapshot

        cause = Cause[args.cause]

        if no_snapshot and snapshot_id:
            return error("Cannot specify snapshot with no_snapshot option")

        if not tag and args.patch_file:
            tag = 'patch'

        # 2. validate snapshot
        if snapshot_id:
            snapshot = Snapshot.query.get(snapshot_id)
            if not snapshot:
                return error("Unable to find snapshot.")
            if snapshot.status != SnapshotStatus.active:
                return error("Snapshot is in an invalid state: %s" % snapshot.status)
            for project in projects:
                plans = get_build_plans(project)
                for plan in plans:
                    plan_options = plan.get_item_options()
                    allow_snapshot = '1' == plan_options.get('snapshot.allow', '1') or plan.snapshot_plan
                    if allow_snapshot and not SnapshotImage.get(plan, snapshot_id):
                        # We want to create a build using a specific snapshot but no image
                        # was found for this plan so fail.
                        return error("Snapshot cannot be applied to %s's %s" % (project.slug, plan.label))

        # 3. find revision
        try:
            revision = identify_revision(repository, args.sha)
        except MissingRevision:
            # if the default fails, we absolutely can't continue and the
            # client should send a valid revision
            return error("Unable to find commit %s in %s." % (args.sha, repository.url),
                         problems=['sha', 'repository'])

        # get default values for arguments
        if revision:
            if not author:
                author = revision.author
            if not label:
                label = revision.subject
            # only default the message if it's absolutely not set
            if message is None:
                message = revision.message
            sha = revision.sha
        else:
            sha = args.sha

        if not args.target:
            target = sha[:12]
        else:
            target = args.target[:128]

        if not label:
            if message:
                label = message.splitlines()[0]
            if not label:
                label = 'A homeless build'
        label = label[:128]

        # 4. Check for patch
        if args.patch_file:
            fp = StringIO()
            for line in args.patch_file:
                fp.write(line)
            patch_file = fp
        else:
            patch_file = None

        if patch_file:
            patch = Patch(
                repository=repository,
                parent_revision_sha=sha,
                diff=patch_file.getvalue(),
            )
            db.session.add(patch)
        else:
            patch = None

        project_options = ProjectOptionsHelper.get_options(projects, ['build.file-whitelist'])

        # mark as commit or diff build
        if not patch:
            is_commit_build = True
        else:
            is_commit_build = False

        apply_project_files_trigger = args.apply_project_files_trigger
        if apply_project_files_trigger is None:
            apply_project_files_trigger = args.apply_file_whitelist
        if apply_project_files_trigger is None:
            if is_commit_build:
                apply_project_files_trigger = False
            else:
                apply_project_files_trigger = True

        if apply_project_files_trigger:
            if patch:
                diff_parser = DiffParser(patch.diff)
                files_changed = diff_parser.get_changed_files()
            elif revision:
                try:
                    files_changed = _get_revision_changed_files(repository, revision)
                except MissingRevision:
                    return error("Unable to find commit %s in %s." % (args.sha, repository.url),
                                 problems=['sha', 'repository'])
            else:
                # the only way that revision can be null is if this repo does not have a vcs backend
                logging.warning('Revision and patch are both None for sha %s. This is because the repo %s does not have a VCS backend.', sha, repository.url)
                files_changed = None
        else:
            # we won't be applying file whitelist, so there is no need to get the list of changed files.
            files_changed = None

        collection_id = uuid.uuid4()

        builds = []
        for project in projects:
            plan_list = get_build_plans(project)
            if not plan_list:
                logging.warning('No plans defined for project %s', project.slug)
                continue
            # 5. apply project whitelist as appropriate
            if args.project_whitelist is not None and project.slug not in args.project_whitelist:
                logging.info('Project %s is not in the supplied whitelist', project.slug)
                continue
            forced_sha = sha
            # TODO(dcramer): find_green_parent_sha needs to take branch
            # into account
            # if patch_file:
            #     forced_sha = find_green_parent_sha(
            #         project=project,
            #         sha=sha,
            #     )

            # 6. apply file whitelist as appropriate
            diff = None
            if patch is not None:
                diff = patch.diff
            if (
                apply_project_files_trigger and
                files_changed is not None and
                not files_changed_should_trigger_project(
                    files_changed, project, project_options[project.id], sha, diff)
            ):
                logging.info('Changed files do not trigger build for project %s', project.slug)
                continue
            # 7. create/ensure build
            build_message = None
            selective_testing_policy = SelectiveTestingPolicy.disabled
            if args.selective_testing and project_lib.contains_active_autogenerated_plan(project):
                if is_commit_build:
                    selective_testing_policy, reasons = get_selective_testing_policy(project, sha, diff)
                    if reasons:
                        if selective_testing_policy is SelectiveTestingPolicy.disabled:
                            reasons = ["Selective testing was requested but not done because:"] + ['    ' + m for m in reasons]
                        build_message = '\n'.join(reasons)
                else:
                    # NOTE: for diff builds, it makes sense to just do selective testing,
                    # since it will never become a parent build and will never be used to
                    # calculate revision results.
                    selective_testing_policy = SelectiveTestingPolicy.enabled
            if args.ensure_only:
                potentials = list(Build.query.filter(
                    Build.project_id == project.id,
                    Build.source.has(revision_sha=sha, patch=patch),
                ).order_by(
                    Build.date_created.desc()  # newest first
                ).limit(1))
                if len(potentials) == 0:
                    builds.append(create_build(
                        project=project,
                        collection_id=collection_id,
                        sha=forced_sha,
                        target=target,
                        label=label,
                        message=message,
                        author=author,
                        patch=patch,
                        source_data=patch_data,
                        tag=tag,
                        cause=cause,
                        snapshot_id=snapshot_id,
                        no_snapshot=no_snapshot,
                        selective_testing_policy=selective_testing_policy,
                    ))
                else:
                    builds.append(potentials[0])
            else:
                builds.append(create_build(
                    project=project,
                    collection_id=collection_id,
                    sha=forced_sha,
                    target=target,
                    label=label,
                    message=message,
                    author=author,
                    patch=patch,
                    source_data=patch_data,
                    tag=tag,
                    cause=cause,
                    snapshot_id=snapshot_id,
                    no_snapshot=no_snapshot,
                    selective_testing_policy=selective_testing_policy,
                ))

            if build_message:
                message = BuildMessage(
                    build=builds[-1],
                    text=build_message,
                )
                db.session.add(message)
                db.session.commit()

        return self.respond(builds)
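For reference, this post() handler is presumably the view behind '/api/0/builds/', the endpoint that run() earlier on this page exercises through test_request_context. The sketch below is a guess at how an external client might call it: the host name is made up, the response is presumably a JSON list of builds, and the wire format of optional flags such as ensure_only or apply_project_files_trigger depends on the argument parser, which is not shown in these examples.

import requests

CHANGES_URL = 'https://changes.example.com/api/0/builds/'   # hypothetical host

resp = requests.post(CHANGES_URL, data={
    'sha': '1234abcd' * 5,       # commit to build (base revision if a patch is attached)
    'project': 'my-project',     # alternatively 'repository' or 'repository[phabricator.callsign]'
    'tag': 'commit',
})
resp.raise_for_status()
builds = resp.json()             # list of created (or, with ensure_only, existing) builds
print(len(builds), 'builds')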
Example #17
    def post(self):
        """
        Create a new commit or diff build. The API roughly goes like this:

        1. Identify the project(s) to build for. This can be done by specifying
        ``project``, ``repository``, or ``repository[callsign]``. If a repository is
        specified somehow, then all projects for that repository are considered
        for building.

        2. Using the ``sha``, find the appropriate revision object. This may
        involve updating the repo.

        3. If ``patch`` is given, then apply the patch and mark this as a diff build.
        Otherwise, this is a commit build.

        4. If provided, apply project_whitelist, filtering out projects not in
        this whitelist.

        5. Based on the flag ``apply_project_files_trigger`` (see comment on the argument
        itself for default values), decide whether or not to filter out projects
        by file blacklist and whitelist.

        6. Attach metadata and create/ensure existence of a build for each project,
        depending on the flag ``ensure_only``.

        NOTE: In ensure-only mode, the collection_ids of the returned builds are
        not necessarily identical, as we give new builds new collection IDs
        and preserve the existing builds' collection IDs.

        NOTE: If ``patch`` is specified ``sha`` is assumed to be the original
        base revision to apply the patch.

        Not relevant until we fix TODO: ``sha`` is **not** guaranteed to be the rev
        used to apply the patch. See ``find_green_parent_sha`` for the logic of
        identifying the correct revision.
        """
        args = self.parser.parse_args()

        if args.patch_file and args.ensure_only:
            return error("Ensure-only mode does not work with a diff build yet.", problems=["patch", "ensure_only"])

        if not (args.project or args.repository or args['repository[phabricator.callsign]']):
            return error("Project or repository must be specified",
                         problems=["project", "repository",
                                   "repository[phabricator.callsign]"])

        # read arguments
        if args.patch_data:
            try:
                patch_data = json.loads(args.patch_data)
            except Exception:
                return error("Invalid patch data (must be JSON dict)",
                             problems=["patch[data]"])

            if not isinstance(patch_data, dict):
                return error("Invalid patch data (must be JSON dict)",
                             problems=["patch[data]"])
        else:
            patch_data = None

        # 1. identify project(s)
        projects, repository = try_get_projects_and_repository(args)

        if not projects:
            return error("Unable to find project(s).")

        # read arguments
        label = args.label
        author = args.author
        message = args.message
        tag = args.tag

        if not tag and args.patch_file:
            tag = 'patch'

        # 2. find revision
        try:
            revision = identify_revision(repository, args.sha)
        except MissingRevision:
            # if the default fails, we absolutely can't continue and the
            # client should send a valid revision
            return error("Unable to find commit %s in %s." % (
                args.sha, repository.url), problems=['sha', 'repository'])

        # get default values for arguments
        if revision:
            if not author:
                author = revision.author
            if not label:
                label = revision.subject
            # only default the message if it's absolutely not set
            if message is None:
                message = revision.message
            sha = revision.sha
        else:
            sha = args.sha

        if not args.target:
            target = sha[:12]
        else:
            target = args.target[:128]

        if not label:
            if message:
                label = message.splitlines()[0]
            if not label:
                label = 'A homeless build'
        label = label[:128]

        # 3. Check for patch
        if args.patch_file:
            fp = StringIO()
            for line in args.patch_file:
                fp.write(line)
            patch_file = fp
        else:
            patch_file = None

        if patch_file:
            patch = Patch(
                repository=repository,
                parent_revision_sha=sha,
                diff=patch_file.getvalue(),
            )
            db.session.add(patch)
        else:
            patch = None

        project_options = ProjectOptionsHelper.get_options(projects, ['build.file-whitelist'])

        # mark as commit or diff build
        if not patch:
            is_commit_build = True
        else:
            is_commit_build = False

        apply_project_files_trigger = args.apply_project_files_trigger
        if apply_project_files_trigger is None:
            apply_project_files_trigger = args.apply_file_whitelist
        if apply_project_files_trigger is None:
            if is_commit_build:
                apply_project_files_trigger = False
            else:
                apply_project_files_trigger = True

        if apply_project_files_trigger:
            if patch:
                diff_parser = DiffParser(patch.diff)
                files_changed = diff_parser.get_changed_files()
            elif revision:
                try:
                    files_changed = _get_revision_changed_files(repository, revision)
                except MissingRevision:
                    return error("Unable to find commit %s in %s." % (
                        args.sha, repository.url), problems=['sha', 'repository'])
            else:
                # the only way that revision can be null is if this repo does not have a vcs backend
                logging.warning('Revision and patch are both None for sha %s. This is because the repo %s does not have a VCS backend.', sha, repository.url)
                files_changed = None
        else:
            # we won't be applying file whitelist, so there is no need to get the list of changed files.
            files_changed = None

        collection_id = uuid.uuid4()
        builds = []
        for project in projects:
            plan_list = get_build_plans(project)
            if not plan_list:
                logging.warning('No plans defined for project %s', project.slug)
                continue
            # 4. apply project whitelist as appropriate
            if args.project_whitelist is not None and project.slug not in args.project_whitelist:
                logging.info('Project %s is not in the supplied whitelist', project.slug)
                continue
            forced_sha = sha
            # TODO(dcramer): find_green_parent_sha needs to take branch
            # into account
            # if patch_file:
            #     forced_sha = find_green_parent_sha(
            #         project=project,
            #         sha=sha,
            #     )

            # 5. apply file whitelist as appropriate
            diff = None
            if patch is not None:
                diff = patch.diff
            try:
                if (
                    apply_project_files_trigger
                    and files_changed is not None
                    and not files_changed_should_trigger_project(
                        files_changed, project, project_options[project.id], sha, diff)
                ):
                    logging.info('Changed files do not trigger build for project %s', project.slug)
                    continue
            except InvalidDiffError:
                # ok, the build will fail and the user will be notified.
                pass
            except ProjectConfigError:
                author_name = '(Unknown)'
                if author:
                    author_name = author.name
                logging.error('Project config for project %s is not in a valid format. Author is %s.', project.slug, author_name, exc_info=True)

            # 6. create/ensure build
            if args.ensure_only:
                potentials = list(Build.query.filter(
                    Build.project_id == project.id,
                    Build.source.has(revision_sha=sha, patch=patch),
                ).order_by(
                    Build.date_created.desc()  # newest first
                ).limit(1))
                if len(potentials) == 0:
                    builds.append(create_build(
                        project=project,
                        collection_id=collection_id,
                        sha=forced_sha,
                        target=target,
                        label=label,
                        message=message,
                        author=author,
                        patch=patch,
                        source_data=patch_data,
                        tag=tag,
                    ))
                else:
                    builds.append(potentials[0])
            else:
                builds.append(create_build(
                    project=project,
                    collection_id=collection_id,
                    sha=forced_sha,
                    target=target,
                    label=label,
                    message=message,
                    author=author,
                    patch=patch,
                    source_data=patch_data,
                    tag=tag,
                ))

        return self.respond(builds)
Example #18
    def post(self):
        """
        Create a new commit or diff build. The API roughly goes like this:

        1. Identify the project(s) to build for. This can be done by specifying
        ``project``, ``repository``, or ``repository[callsign]``. If a repository is
        specified somehow, then all projects for that repository are considered
        for building.

        2. Using the ``sha``, find the appropriate revision object. This may
        involve updating the repo.

        3. If ``patch`` is given, then apply the patch and mark this as a diff build.
        Otherwise, this is a commit build.

        4. If ``snapshot_id`` is given, verify that the snapshot can be used by all
        projects.

        5. If provided, apply project_whitelist, filtering out projects not in
        this whitelist.

        6. Based on the flag ``apply_project_files_trigger`` (see comment on the argument
        itself for default values), decide whether or not to filter out projects
        by file blacklist and whitelist.

        7. Attach metadata and create/ensure existence of a build for each project,
        depending on the flag ``ensure_only``.

        NOTE: In ensure-only mode, the collection_ids of the returned builds are
        not necessarily identical, as we give new builds new collection IDs
        and preserve the existing builds' collection IDs.

        NOTE: If ``patch`` is specified ``sha`` is assumed to be the original
        base revision to apply the patch.

        Not relevant until we fix TODO: ``sha`` is **not** guaranteed to be the rev
        used to apply the patch. See ``find_green_parent_sha`` for the logic of
        identifying the correct revision.
        """
        args = self.parser.parse_args()

        if args.patch_file and args.ensure_only:
            return error(
                "Ensure-only mode does not work with a diff build yet.",
                problems=["patch", "ensure_only"])

        if not (args.project or args.repository
                or args['repository[phabricator.callsign]']):
            return error("Project or repository must be specified",
                         problems=[
                             "project", "repository",
                             "repository[phabricator.callsign]"
                         ])

        # read arguments
        if args.patch_data:
            try:
                patch_data = json.loads(args.patch_data)
            except Exception:
                return error("Invalid patch data (must be JSON dict)",
                             problems=["patch[data]"])

            if not isinstance(patch_data, dict):
                return error("Invalid patch data (must be JSON dict)",
                             problems=["patch[data]"])
        else:
            patch_data = None

        # 1. identify project(s)
        projects, repository = try_get_projects_and_repository(args)

        if not projects:
            return error("Unable to find project(s).")

        # read arguments
        label = args.label
        author = args.author
        message = args.message
        tag = args.tag
        snapshot_id = args.snapshot_id
        no_snapshot = args.no_snapshot

        if no_snapshot and snapshot_id:
            return error("Cannot specify snapshot with no_snapshot option")

        if not tag and args.patch_file:
            tag = 'patch'

        # 2. validate snapshot
        if snapshot_id:
            snapshot = Snapshot.query.get(snapshot_id)
            if not snapshot:
                return error("Unable to find snapshot.")
            if snapshot.status != SnapshotStatus.active:
                return error("Snapshot is in an invalid state: %s" %
                             snapshot.status)
            for project in projects:
                plans = get_build_plans(project)
                for plan in plans:
                    plan_options = plan.get_item_options()
                    allow_snapshot = '1' == plan_options.get(
                        'snapshot.allow', '0') or plan.snapshot_plan
                    if allow_snapshot and not SnapshotImage.get(
                            plan, snapshot_id):
                        # We want to create a build using a specific snapshot but no image
                        # was found for this plan so fail.
                        return error("Snapshot cannot be applied to %s's %s" %
                                     (project.slug, plan.label))

        # 3. find revision
        try:
            revision = identify_revision(repository, args.sha)
        except MissingRevision:
            # if the default fails, we absolutely can't continue and the
            # client should send a valid revision
            return error("Unable to find commit %s in %s." %
                         (args.sha, repository.url),
                         problems=['sha', 'repository'])

        # get default values for arguments
        if revision:
            if not author:
                author = revision.author
            if not label:
                label = revision.subject
            # only default the message if it's absolutely not set
            if message is None:
                message = revision.message
            sha = revision.sha
        else:
            sha = args.sha

        if not args.target:
            target = sha[:12]
        else:
            target = args.target[:128]

        if not label:
            if message:
                label = message.splitlines()[0]
            if not label:
                label = 'A homeless build'
        label = label[:128]

        # 4. Check for patch
        if args.patch_file:
            fp = StringIO()
            for line in args.patch_file:
                fp.write(line)
            patch_file = fp
        else:
            patch_file = None

        if patch_file:
            patch = Patch(
                repository=repository,
                parent_revision_sha=sha,
                diff=patch_file.getvalue(),
            )
            db.session.add(patch)
        else:
            patch = None

        project_options = ProjectOptionsHelper.get_options(
            projects, ['build.file-whitelist'])

        # mark as commit or diff build
        if not patch:
            is_commit_build = True
        else:
            is_commit_build = False

        apply_project_files_trigger = args.apply_project_files_trigger
        if apply_project_files_trigger is None:
            apply_project_files_trigger = args.apply_file_whitelist
        if apply_project_files_trigger is None:
            if is_commit_build:
                apply_project_files_trigger = False
            else:
                apply_project_files_trigger = True

        if apply_project_files_trigger:
            if patch:
                diff_parser = DiffParser(patch.diff)
                files_changed = diff_parser.get_changed_files()
            elif revision:
                try:
                    files_changed = _get_revision_changed_files(
                        repository, revision)
                except MissingRevision:
                    return error("Unable to find commit %s in %s." %
                                 (args.sha, repository.url),
                                 problems=['sha', 'repository'])
            else:
                # the only way that revision can be null is if this repo does not have a vcs backend
                logging.warning(
                    'Revision and patch are both None for sha %s. This is because the repo %s does not have a VCS backend.',
                    sha, repository.url)
                files_changed = None
        else:
            # we won't be applying file whitelist, so there is no need to get the list of changed files.
            files_changed = None

        collection_id = uuid.uuid4()
        builds = []
        for project in projects:
            plan_list = get_build_plans(project)
            if not plan_list:
                logging.warning('No plans defined for project %s',
                                project.slug)
                continue
            # 5. apply project whitelist as appropriate
            if args.project_whitelist is not None and project.slug not in args.project_whitelist:
                logging.info('Project %s is not in the supplied whitelist',
                             project.slug)
                continue
            forced_sha = sha
            # TODO(dcramer): find_green_parent_sha needs to take branch
            # into account
            # if patch_file:
            #     forced_sha = find_green_parent_sha(
            #         project=project,
            #         sha=sha,
            #     )

            # 6. apply file whitelist as appropriate
            diff = None
            if patch is not None:
                diff = patch.diff
            try:
                if (apply_project_files_trigger and files_changed is not None
                        and not files_changed_should_trigger_project(
                            files_changed, project,
                            project_options[project.id], sha, diff)):
                    logging.info(
                        'Changed files do not trigger build for project %s',
                        project.slug)
                    continue
            except InvalidDiffError:
                # ok, the build will fail and the user will be notified.
                pass
            except ProjectConfigError:
                author_name = '(Unknown)'
                if author:
                    author_name = author.name
                logging.error(
                    'Project config for project %s is not in a valid format. Author is %s.',
                    project.slug,
                    author_name,
                    exc_info=True)

            # 7. create/ensure build
            if args.ensure_only:
                potentials = list(
                    Build.query.filter(
                        Build.project_id == project.id,
                        Build.source.has(revision_sha=sha, patch=patch),
                    ).order_by(Build.date_created.desc()  # newest first
                               ).limit(1))
                if len(potentials) == 0:
                    builds.append(
                        create_build(project=project,
                                     collection_id=collection_id,
                                     sha=forced_sha,
                                     target=target,
                                     label=label,
                                     message=message,
                                     author=author,
                                     patch=patch,
                                     source_data=patch_data,
                                     tag=tag,
                                     snapshot_id=snapshot_id,
                                     no_snapshot=no_snapshot))
                else:
                    builds.append(potentials[0])
            else:
                builds.append(
                    create_build(project=project,
                                 collection_id=collection_id,
                                 sha=forced_sha,
                                 target=target,
                                 label=label,
                                 message=message,
                                 author=author,
                                 patch=patch,
                                 source_data=patch_data,
                                 tag=tag,
                                 snapshot_id=snapshot_id,
                                 no_snapshot=no_snapshot))

        return self.respond(builds)
Example #19
    def post_impl(self):
        """
        Notify Changes of a newly created diff.

        Depending on system configuration, this may create 0 or more new builds,
        and the resulting response will be a list of those build objects.
        """

        # we manually check for arg presence here so we can send a more specific
        # error message to the user (rather than a plain 400)
        args = self.parser.parse_args()
        if not args.repository:
            # No need to postback a comment for this
            statsreporter.stats().incr("diffs_repository_not_found")
            return error("Repository not found")

        repository = args.repository

        projects = list(Project.query.options(
            subqueryload_all('plans'),
        ).filter(
            Project.status == ProjectStatus.active,
            Project.repository_id == repository.id,
        ))

        # no projects bound to repository
        if not projects:
            return self.respond([])

        options = dict(
            db.session.query(
                ProjectOption.project_id, ProjectOption.value
            ).filter(
                ProjectOption.project_id.in_([p.id for p in projects]),
                ProjectOption.name.in_([
                    'phabricator.diff-trigger',
                ])
            )
        )

        # Filter out projects that aren't configured to run builds off of diffs
        # - Diff trigger disabled
        # - No build plans
        projects = [
            p for p in projects
            if options.get(p.id, '1') == '1' and get_build_plans(p)
        ]

        if not projects:
            return self.respond([])

        statsreporter.stats().incr('diffs_posted_from_phabricator')

        label = args.label[:128]
        author = args.author
        message = args.message
        sha = args.sha
        target = 'D%s' % args['phabricator.revisionID']

        try:
            identify_revision(repository, sha)
        except MissingRevision:
            # This may just be a broken request (which is why we respond with a 400) but
            # it also might indicate Phabricator and Changes being out of sync somehow,
            # so we err on the side of caution and log it as an error.
            logging.error("Diff %s was posted for an unknown revision (%s, %s)",
                          target, sha, repository.url)
            # We should postback since this can happen if a user diffs dependent revisions
            statsreporter.stats().incr("diff_missing_base_revision")
            return self.postback_error(
                "Unable to find base revision {revision} in {repo} on Changes. Some possible reasons:\n"
                " - Changes hasn't picked up {revision} yet. Retry in a minute\n"
                " - {revision} only exists in your local copy. Changes cannot apply your patch\n".format(
                    revision=sha,
                    repo=repository.url,
                ),
                target,
                problems=['sha', 'repository'])

        source_data = {
            'phabricator.buildTargetPHID': args['phabricator.buildTargetPHID'],
            'phabricator.diffID': args['phabricator.diffID'],
            'phabricator.revisionID': args['phabricator.revisionID'],
            'phabricator.revisionURL': args['phabricator.revisionURL'],
        }

        patch = Patch(
            repository=repository,
            parent_revision_sha=sha,
            diff=''.join(line.decode('utf-8') for line in args.patch_file),
        )
        db.session.add(patch)

        source = Source(
            patch=patch,
            repository=repository,
            revision_sha=sha,
            data=source_data,
        )
        db.session.add(source)

        phabricatordiff = try_create(PhabricatorDiff, {
            'diff_id': args['phabricator.diffID'],
            'revision_id': args['phabricator.revisionID'],
            'url': args['phabricator.revisionURL'],
            'source': source,
        })
        if phabricatordiff is None:
            logging.warning("Diff %s, Revision %s already exists",
                            args['phabricator.diffID'], args['phabricator.revisionID'])
            # No need to inform user about this explicitly
            statsreporter.stats().incr("diffs_already_exists")
            return error("Diff already exists within Changes")

        project_options = ProjectOptionsHelper.get_options(projects, ['build.file-whitelist'])
        diff_parser = DiffParser(patch.diff)
        files_changed = diff_parser.get_changed_files()

        collection_id = uuid.uuid4()
        builds = []
        for project in projects:
            plan_list = get_build_plans(project)
            # We already filtered out empty build plans
            assert plan_list, ('No plans defined for project {}'.format(project.slug))

            try:
                if not files_changed_should_trigger_project(files_changed, project, project_options[project.id], sha, diff=patch.diff):
                    logging.info('No changed files matched project trigger for project %s', project.slug)
                    continue
            except InvalidDiffError:
                # ok, the build will fail and the user will be notified
                pass
            except ProjectConfigError:
                logging.error('Project config for project %s is not in a valid format. Author is %s.', project.slug, author.name, exc_info=True)

            builds.append(create_build(
                project=project,
                collection_id=collection_id,
                sha=sha,
                target=target,
                label=label,
                message=message,
                author=author,
                patch=patch,
                tag="phabricator",
            ))

        # This is the counterpoint to the above 'diffs_posted_from_phabricator';
        # at this point we've successfully processed the diff, so comparing this
        # stat to the above should give us the phabricator diff failure rate.
        statsreporter.stats().incr('diffs_successfully_processed_from_phabricator')

        return self.respond(builds)
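try_create(PhabricatorDiff, {...}) above returns None when the diff/revision row already exists, which is how duplicate Phabricator notifications are detected. A sketch of what such an insert-if-absent helper could look like, assuming a SQLAlchemy uniqueness constraint and a savepoint; the actual Changes helper may be implemented differently, and the db import path is an assumption:

from sqlalchemy.exc import IntegrityError

from changes.config import db    # assumed import path, as used by the examples above


def try_create(model, where):
    # Insert a new row; if a uniqueness constraint fires, report "already exists"
    # by returning None without undoing earlier, unrelated session work.
    try:
        with db.session.begin_nested():      # savepoint
            instance = model(**where)
            db.session.add(instance)
    except IntegrityError:
        return None
    return instance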
Example #20
    def post_impl(self):
        """
        Notify Changes of a newly created diff.

        Depending on system configuration, this may create 0 or more new builds,
        and the resulting response will be a list of those build objects.
        """

        # we manually check for arg presence here so we can send a more specific
        # error message to the user (rather than a plain 400)
        args = self.parser.parse_args()
        if not args.repository:
            # No need to postback a comment for this
            statsreporter.stats().incr("diffs_repository_not_found")
            return error("Repository not found")

        repository = args.repository

        projects = list(
            Project.query.options(subqueryload_all('plans'), ).filter(
                Project.status == ProjectStatus.active,
                Project.repository_id == repository.id,
            ))

        # no projects bound to repository
        if not projects:
            return self.respond([])

        options = dict(
            db.session.query(
                ProjectOption.project_id, ProjectOption.value).filter(
                    ProjectOption.project_id.in_([p.id for p in projects]),
                    ProjectOption.name.in_([
                        'phabricator.diff-trigger',
                    ])))

        # Filter out projects that aren't configured to run builds off of diffs
        # - Diff trigger disabled
        # - No build plans
        projects = [
            p for p in projects
            if options.get(p.id, '1') == '1' and get_build_plans(p)
        ]

        if not projects:
            return self.respond([])

        statsreporter.stats().incr('diffs_posted_from_phabricator')

        label = args.label[:128]
        author = args.author
        message = args.message
        sha = args.sha
        target = 'D%s' % args['phabricator.revisionID']

        try:
            identify_revision(repository, sha)
        except MissingRevision:
            # This may just be a broken request (which is why we respond with a 400) but
            # it also might indicate Phabricator and Changes being out of sync somehow,
            # so we err on the side of caution and log it as an error.
            logging.error(
                "Diff %s was posted for an unknown revision (%s, %s)", target,
                sha, repository.url)
            # We should postback since this can happen if a user diffs dependent revisions
            statsreporter.stats().incr("diffs_missing_base_revision")
            return self.postback_error(
                "Unable to find base revision {revision} in {repo} on Changes. Some possible reasons:\n"
                " - You may be working on multiple stacked diffs in your local repository.\n"
                "   {revision} only exists in your local copy. Changes thus cannot apply your patch\n"
                " - If you are sure that's not the case, it's possible you applied your patch to an extremely\n"
                "   recent revision which Changes hasn't picked up yet. Retry in a minute\n"
                .format(
                    revision=sha,
                    repo=repository.url,
                ),
                target,
                problems=['sha', 'repository'])

        source_data = {
            'phabricator.buildTargetPHID': args['phabricator.buildTargetPHID'],
            'phabricator.diffID': args['phabricator.diffID'],
            'phabricator.revisionID': args['phabricator.revisionID'],
            'phabricator.revisionURL': args['phabricator.revisionURL'],
        }

        patch = Patch(
            repository=repository,
            parent_revision_sha=sha,
            diff=''.join(line.decode('utf-8') for line in args.patch_file),
        )
        db.session.add(patch)

        source = Source(
            patch=patch,
            repository=repository,
            revision_sha=sha,
            data=source_data,
        )
        db.session.add(source)

        phabricatordiff = try_create(
            PhabricatorDiff, {
                'diff_id': args['phabricator.diffID'],
                'revision_id': args['phabricator.revisionID'],
                'url': args['phabricator.revisionURL'],
                'source': source,
            })
        if phabricatordiff is None:
            logging.warning("Diff %s, Revision %s already exists",
                            args['phabricator.diffID'],
                            args['phabricator.revisionID'])
            # No need to inform user about this explicitly
            statsreporter.stats().incr("diffs_already_exists")
            return error("Diff already exists within Changes")

        project_options = ProjectOptionsHelper.get_options(
            projects, ['build.file-whitelist'])
        diff_parser = DiffParser(patch.diff)
        files_changed = diff_parser.get_changed_files()

        collection_id = uuid.uuid4()
        builds = []
        for project in projects:
            plan_list = get_build_plans(project)
            # We already filtered out empty build plans
            assert plan_list, ('No plans defined for project {}'.format(
                project.slug))

            try:
                if not files_changed_should_trigger_project(
                        files_changed,
                        project,
                        project_options[project.id],
                        sha,
                        diff=patch.diff):
                    logging.info(
                        'No changed files matched project trigger for project %s',
                        project.slug)
                    continue
            except InvalidDiffError:
                # ok, the build will fail and the user will be notified
                pass
            except ProjectConfigError:
                logging.error(
                    'Project config for project %s is not in a valid format. Author is %s.',
                    project.slug,
                    author.name,
                    exc_info=True)

            builds.append(
                create_build(
                    project=project,
                    collection_id=collection_id,
                    sha=sha,
                    target=target,
                    label=label,
                    message=message,
                    author=author,
                    patch=patch,
                    tag="phabricator",
                ))

        # This is the counterpoint to the above 'diffs_posted_from_phabricator';
        # at this point we've successfully processed the diff, so comparing this
        # stat to the above should give us the phabricator diff failure rate.
        statsreporter.stats().incr(
            'diffs_successfully_processed_from_phabricator')

        return self.respond(builds)
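A note on the project filter near the top of this handler: options.get(p.id, '1') == '1' makes the 'phabricator.diff-trigger' option opt-out, so a project with no option row at all still gets diff builds. Below is a minimal, self-contained sketch of that behaviour; the project IDs and option values are made up for illustration and are not part of the handler:

# Illustration only: mirrors the diff-trigger filter above with made-up data.
SAMPLE_OPTIONS = {
    'proj-a': '0',   # explicitly disabled
    'proj-b': '1',   # explicitly enabled
    # 'proj-c' has no row at all -> enabled by default
}

def diff_trigger_enabled(project_id, options=SAMPLE_OPTIONS):
    # The default of '1' is the key detail: a missing row counts as enabled.
    return options.get(project_id, '1') == '1'

assert not diff_trigger_enabled('proj-a')
assert diff_trigger_enabled('proj-b')
assert diff_trigger_enabled('proj-c')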
Example #21
    def post_impl(self):
        """
        Notify Changes of a newly created diff.

        Depending on system configuration, this may create 0 or more new builds,
        and the resulting response will be a list of those build objects.
        """
        args = self.parser.parse_args()

        repository = args.repository
        if not args.repository:
            return error("Repository not found")

        projects = list(Project.query.options(
            subqueryload_all('plans'),
        ).filter(
            Project.status == ProjectStatus.active,
            Project.repository_id == repository.id,
        ))

        # no projects bound to repository
        if not projects:
            return self.respond([])

        options = dict(
            db.session.query(
                ProjectOption.project_id, ProjectOption.value
            ).filter(
                ProjectOption.project_id.in_([p.id for p in projects]),
                ProjectOption.name.in_([
                    'phabricator.diff-trigger',
                ])
            )
        )

        projects = [
            p for p in projects
            if options.get(p.id, '1') == '1'
        ]

        if not projects:
            return self.respond([])

        statsreporter.stats().incr('diffs_posted_from_phabricator')

        label = args.label[:128]
        author = args.author
        message = args.message
        sha = args.sha
        target = 'D{}'.format(args['phabricator.revisionID'])

        try:
            identify_revision(repository, sha)
        except MissingRevision:
            # This may just be a broken request (which is why we respond with a 400) but
            # it also might indicate Phabricator and Changes being out of sync somehow,
            # so we err on the side of caution and log it as an error.
            logging.error("Diff %s was posted for an unknown revision (%s, %s)",
                          target, sha, repository.url)
            return error("Unable to find commit %s in %s." % (
                sha, repository.url), problems=['sha', 'repository'])

        source_data = {
            'phabricator.buildTargetPHID': args['phabricator.buildTargetPHID'],
            'phabricator.diffID': args['phabricator.diffID'],
            'phabricator.revisionID': args['phabricator.revisionID'],
            'phabricator.revisionURL': args['phabricator.revisionURL'],
        }

        patch = Patch(
            repository=repository,
            parent_revision_sha=sha,
            diff=''.join(line.decode('utf-8') for line in args.patch_file),
        )
        db.session.add(patch)

        source = Source(
            patch=patch,
            repository=repository,
            revision_sha=sha,
            data=source_data,
        )
        db.session.add(source)

        phabricatordiff = try_create(PhabricatorDiff, {
            'diff_id': args['phabricator.diffID'],
            'revision_id': args['phabricator.revisionID'],
            'url': args['phabricator.revisionURL'],
            'source': source,
        })
        if phabricatordiff is None:
            logging.warning("Diff %s, Revision %s already exists",
                            args['phabricator.diffID'], args['phabricator.revisionID'])
            return error("Diff already exists within Changes")

        project_options = ProjectOptionsHelper.get_options(projects, ['build.file-whitelist'])
        diff_parser = DiffParser(patch.diff)
        files_changed = diff_parser.get_changed_files()

        collection_id = uuid.uuid4()
        builds = []
        for project in projects:
            plan_list = get_build_plans(project)
            if not plan_list:
                logging.warning('No plans defined for project %s', project.slug)
                continue

            try:
                if not files_changed_should_trigger_project(files_changed, project, project_options[project.id], sha, diff=patch.diff):
                    logging.info('No changed files matched project trigger for project %s', project.slug)
                    continue
            except InvalidDiffError:
                # ok, the build will fail and the user will be notified
                pass
            except ProjectConfigError:
                logging.error('Project config for project %s is not in a valid format. Author is %s.', project.slug, author.name, exc_info=True)

            builds.append(create_build(
                project=project,
                collection_id=collection_id,
                sha=sha,
                target=target,
                label=label,
                message=message,
                author=author,
                patch=patch,
                tag="phabricator",
            ))

        # This is the counterpoint to the above 'diffs_posted_from_phabricator';
        # at this point we've successfully processed the diff, so comparing this
        # stat to the above should give us the phabricator diff failure rate.
        statsreporter.stats().incr('diffs_successfully_processed_from_phabricator')

        return self.respond(builds)
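The try_create helper used to guard against duplicate PhabricatorDiff rows is not shown in these listings. The following is a plausible minimal sketch, assuming SQLAlchemy with a unique constraint covering the passed-in fields and the documented SAVEPOINT pattern for "insert unless it already exists"; the helper's name and None-on-duplicate behaviour come from how the handlers above use it, everything else here is an assumption:

from sqlalchemy.exc import IntegrityError

from changes.config import db  # assumed import path for the app's SQLAlchemy instance


def try_create(model, where):
    """Hypothetical sketch of the try_create helper the handlers above rely on:
    insert a row and return it, or return None if a matching row already exists
    (detected via the model's unique constraint)."""
    instance = model(**where)
    try:
        # SAVEPOINT so a failed insert doesn't poison the outer transaction.
        with db.session.begin_nested():
            db.session.add(instance)
    except IntegrityError:
        return None
    return instance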
Example #22
    def post(self):
        """
        Notify Changes of a newly created diff.

        Depending on system configuration, this may create 0 or more new builds,
        and the resulting response will be a list of those build objects.
        """
        args = self.parser.parse_args()

        repository = args.repository
        if not args.repository:
            return error("Repository not found")

        projects = list(
            Project.query.options(subqueryload_all('plans')).filter(
                Project.status == ProjectStatus.active,
                Project.repository_id == repository.id,
            ))

        # no projects bound to repository
        if not projects:
            return self.respond([])

        options = dict(
            db.session.query(
                ProjectOption.project_id, ProjectOption.value).filter(
                    ProjectOption.project_id.in_([p.id for p in projects]),
                    ProjectOption.name.in_([
                        'phabricator.diff-trigger',
                    ])))

        projects = [p for p in projects if options.get(p.id, '1') == '1']

        if not projects:
            return self.respond([])

        statsreporter.stats().incr('diffs_posted_from_phabricator')

        label = args.label[:128]
        author = args.author
        message = args.message
        sha = args.sha
        target = 'D{}'.format(args['phabricator.revisionID'])

        try:
            identify_revision(repository, sha)
        except MissingRevision:
            # This may just be a broken request (which is why we respond with a 400) but
            # it also might indicate Phabricator and Changes being out of sync somehow,
            # so we err on the side of caution and log it as an error.
            logging.error(
                "Diff %s was posted for an unknown revision (%s, %s)", target,
                sha, repository.url)
            return error("Unable to find commit %s in %s." %
                         (sha, repository.url),
                         problems=['sha', 'repository'])

        source_data = {
            'phabricator.buildTargetPHID': args['phabricator.buildTargetPHID'],
            'phabricator.diffID': args['phabricator.diffID'],
            'phabricator.revisionID': args['phabricator.revisionID'],
            'phabricator.revisionURL': args['phabricator.revisionURL'],
        }

        patch = Patch(
            repository=repository,
            parent_revision_sha=sha,
            diff=''.join(line.decode('utf-8') for line in args.patch_file),
        )
        db.session.add(patch)

        source = Source(
            patch=patch,
            repository=repository,
            revision_sha=sha,
            data=source_data,
        )
        db.session.add(source)

        phabricatordiff = try_create(
            PhabricatorDiff, {
                'diff_id': args['phabricator.diffID'],
                'revision_id': args['phabricator.revisionID'],
                'url': args['phabricator.revisionURL'],
                'source': source,
            })
        if phabricatordiff is None:
            logging.error("Diff %s, Revision %s already exists",
                          args['phabricator.diffID'],
                          args['phabricator.revisionID'])
            return error("Diff already exists within Changes")

        project_options = ProjectOptionsHelper.get_options(
            projects, ['build.file-whitelist'])
        diff_parser = DiffParser(patch.diff)
        files_changed = diff_parser.get_changed_files()

        collection_id = uuid.uuid4()
        builds = []
        for project in projects:
            plan_list = get_build_plans(project)
            if not plan_list:
                logging.warning('No plans defined for project %s',
                                project.slug)
                continue

            try:
                if not files_changed_should_trigger_project(
                        files_changed,
                        project,
                        project_options[project.id],
                        sha,
                        diff=patch.diff):
                    logging.info(
                        'No changed files matched project trigger for project %s',
                        project.slug)
                    continue
            except InvalidDiffError:
                # ok, the build will fail and the user will be notified
                pass
            except ProjectConfigError:
                logging.error(
                    'Project config for project %s is not in a valid format. Author is %s.',
                    project.slug,
                    author.name,
                    exc_info=True)

            builds.append(
                create_build(
                    project=project,
                    collection_id=collection_id,
                    sha=sha,
                    target=target,
                    label=label,
                    message=message,
                    author=author,
                    patch=patch,
                    tag="phabricator",
                ))
        # This is the counterpoint to the above 'diffs_posted_from_phabricator';
        # at this point we've successfully processed the diff, so comparing this
        # stat to the above should give us the phabricator diff failure rate.
        statsreporter.stats().incr(
            'diffs_successfully_processed_from_phabricator')

        return self.respond(builds)
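For context, the handlers above are driven by a Phabricator-side hook that POSTs the diff metadata together with the raw patch. A rough client-side sketch of such a call follows; the form field names are taken from the args[...] accesses in the handlers, but the endpoint URL, the multipart field name for the patch, and all sample values are assumptions about a particular deployment:

import requests

def notify_changes_of_diff(base_url, repo_url, sha, patch_path):
    # Hypothetical route; the real URL depends on how the API is mounted.
    url = '{}/api/0/phabricator/notify-diff/'.format(base_url)
    with open(patch_path, 'rb') as patch_file:
        return requests.post(
            url,
            data={
                'repository': repo_url,
                'sha': sha,
                'label': 'Example change',
                'message': 'Example commit message',
                'author': 'Jane Doe <jane@example.com>',
                'phabricator.buildTargetPHID': 'PHID-HMBT-example',
                'phabricator.diffID': '12345',
                'phabricator.revisionID': '6789',
                'phabricator.revisionURL': 'https://phabricator.example.com/D6789',
            },
            # The multipart field name ('patch') is an assumption; the handlers
            # only show that the parsed argument is exposed as args.patch_file.
            files={'patch': patch_file},
        )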