Example #1
def _sync_project_admin_users(project_admin_mapping):
    # type: (Dict[str, Set[str]]) -> None
    """This synchronizes the Changes user database so that only people
    in `project_admin_mapping` are project admins, and that they are
    admins only for the projects they have permissions to.  Note that
    if a user who should be a project admin does not exist in the Changes
    database, the user is created.

    Args:
        project_admin_mapping (Dict[str, Set[str]]): The mapping from
            user emails to project patterns
    """
    args = [~User.project_permissions.is_(None)]
    if len(project_admin_mapping) > 0:
        args.append(~User.email.in_(project_admin_mapping.keys()))
    User.query.filter(*args).update({'project_permissions': None},
                                    synchronize_session=False)
    for email, project_permissions in project_admin_mapping.iteritems():
        create_or_update(
            User,
            where={
                'email': email,
            },
            values={'project_permissions': list(project_permissions)})
    db.session.commit()
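All of the snippets on this page revolve around the create_or_update helper. For orientation, here is a minimal sketch of the upsert semantics the examples assume, written against SQLAlchemy-style models; the import path is an assumption and the real Changes helper may add locking or retry-on-conflict behaviour.

from changes.config import db  # assumed import path

def create_or_update(model, where, values):
    # Look for an existing row matching `where`; update it if found,
    # otherwise insert a new row combining `where` and `values`.
    instance = model.query.filter_by(**where).first()
    created = instance is None
    if created:
        instance = model(**dict(where, **values))
    else:
        for key, value in values.items():
            setattr(instance, key, value)
    db.session.add(instance)
    return instance, created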
Example #2
def _sync_admin_users(admin_emails):
    # type: (Set[str]) -> None
    """Take a look at the Changes user database. Every user with email in
    `admin_emails` should become a Changes admin, and every user already
    an admin whose email is not in `admin_emails` will have their
    admin privileges revoked. Note that if a user who should be an admin
    does not exist in the Changes database, the user is created.

    Args:
        admin_emails (Set[str]): the emails of the people who should be
            admins.
    """
    # revoke access for people who should not have admin access
    assert len(admin_emails) > 0
    User.query.filter(
        ~User.email.in_(admin_emails),
        User.is_admin.is_(True),
    ).update({
        'is_admin': False,
    }, synchronize_session=False)

    # give access for people who should have access
    for email in admin_emails:
        create_or_update(User,
                         where={
                             'email': email,
                         },
                         values={
                             'is_admin': True,
                         })
    db.session.commit()
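As a purely illustrative call site for the two helpers above (the emails and project name patterns below are made up):

# Hypothetical inputs; a real deployment would derive these from its own
# directory or configuration source.
_sync_admin_users({'alice@example.com', 'bob@example.com'})
_sync_project_admin_users({
    'carol@example.com': {'website-*'},         # admin for all matching projects
    'dave@example.com': {'infra', 'infra-ci'},  # admin for two specific projects
})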
Example #3
    def post(self, snapshot_id):
        snapshot = Snapshot.query.get(snapshot_id)
        if snapshot is None:
            return '', 404

        args = self.parser.parse_args()

        if args.status:
            snapshot.status = SnapshotStatus[args.status]

        if args.set_current and snapshot.status != SnapshotStatus.active:
            return '{"error": "Cannot set inactive current snapshot"}', 400

        db.session.add(snapshot)
        db.session.commit()

        if args.set_current:
            # TODO(adegtiar): improve logic for picking current snapshot.
            create_or_update(ProjectOption, where={
                'project': snapshot.project,
                'name': 'snapshot.current',
            }, values={
                'value': snapshot.id.hex,
            })

        return self.respond(snapshot)
Example #4
    def post(self, project_id):
        project = self._get_project(project_id)
        if project is None:
            return error("Project not found", http_code=404)

        args = self.parser.parse_args()

        for name, value in args.iteritems():
            if value is None:
                continue

            # If we're rolling back a snapshot, take note.
            if name == 'snapshot.current':
                current = Snapshot.get_current(project.id)
                if current:
                    # If value is empty, we're deactivating a snapshot without replacing it,
                    # and that's a downgrade too.
                    replacement = value and Snapshot.query.get(value)
                    if not replacement or replacement.date_created < current.date_created:
                        _report_snapshot_downgrade(project)

            create_or_update(ProjectOption, where={
                'project': project,
                'name': name,
            }, values={
                'value': value,
            })

        return self.respond({})
Example #5
    def post(self, project_id):
        project = self._get_project(project_id)
        if project is None:
            return error("Project not found", http_code=404)

        args = self.parser.parse_args()

        for name, value in args.iteritems():
            if value is None:
                continue

            # If we're rolling back a snapshot, take note.
            if name == 'snapshot.current':
                current = Snapshot.get_current(project.id)
                if current:
                    # If value is empty, we're deactivating a snapshot without replacing it,
                    # and that's a downgrade too.
                    replacement = value and Snapshot.query.get(value)
                    if not replacement or replacement.date_created < current.date_created:
                        _report_snapshot_downgrade(project)

            create_or_update(ProjectOption,
                             where={
                                 'project': project,
                                 'name': name,
                             },
                             values={
                                 'value': value,
                             })

        return self.respond({})
Example #6
 def _record_test_rerun_counts(self, test_list):
     create_or_update(ItemStat, where={
         'item_id': self.step.id,
         'name': 'test_rerun_count',
     }, values={
         'value': sum(1 for t in test_list if t.reruns),
     })
Example #7
    def post(self, snapshot_id):
        snapshot = Snapshot.query.get(snapshot_id)
        if snapshot is None:
            return '', 404

        args = self.parser.parse_args()

        if args.status:
            snapshot.status = SnapshotStatus[args.status]

        if args.set_current and snapshot.status != SnapshotStatus.active:
            return '{"error": "Cannot set inactive current snapshot"}', 400

        db.session.add(snapshot)
        db.session.commit()

        if args.set_current:
            # TODO(adegtiar): improve logic for picking current snapshot.
            create_or_update(ProjectOption,
                             where={
                                 'project': snapshot.project,
                                 'name': 'snapshot.current',
                             },
                             values={
                                 'value': snapshot.id.hex,
                             })

        return self.respond(snapshot)
Example #8
    def post(self, step_id):
        step = Step.query.get(step_id)
        if step is None:
            return error("step not found", http_code=404)

        args = self.parser.parse_args()

        if args.implementation is not None:
            step.implementation = args.implementation

        if args.data is not None:
            try:
                data = json.loads(args.data)
            except ValueError as e:
                return error("invalid JSON: %s" % e)

            if not isinstance(data, dict):
                return error("data must be a JSON mapping")

            impl_cls = step.get_implementation(load=False)
            if impl_cls is None:
                return error("unable to load build step implementation")

            try:
                # XXX(dcramer): It's important that we deepcopy data so any
                # mutations within the BuildStep don't propagate into the db
                impl_cls(**deepcopy(data))
            except Exception as exc:
                return error(
                    "unable to create build step mapping provided data: %s" %
                    exc)
            step.data = data

        if args.order is not None:
            step.order = args.order

        step.date_modified = datetime.utcnow()
        db.session.add(step)

        plan = step.plan
        plan.date_modified = step.date_modified
        db.session.add(plan)

        for name in STEP_OPTIONS.keys():
            value = args.get(name)
            if value is None:
                continue

            create_or_update(ItemOption,
                             where={
                                 'item_id': step.id,
                                 'name': name,
                             },
                             values={
                                 'value': value,
                             })

        db.session.commit()
        return self.respond(step)
Example #9
 def _record_test_counts(self, test_list):
     create_or_update(ItemStat, where={
         'item_id': self.step.id,
         'name': 'test_count',
     }, values={
         'value': len(test_list),
     })
     db.session.commit()
Example #10
 def _record_test_failures(self, test_list):
     create_or_update(ItemStat, where={
         'item_id': self.step.id,
         'name': 'test_failures',
     }, values={
         'value': sum(t.result == Result.failed for t in test_list),
     })
     db.session.commit()
Example #11
    def post(self, repository_id):
        repo = Repository.query.get(repository_id)
        if repo is None:
            return '', 404

        args = self.parser.parse_args()

        if args.url:
            repo.url = args.url
        if args.backend:
            repo.backend = RepositoryBackend[args.backend]

        needs_import = False
        if args.status == 'inactive':
            repo.status = RepositoryStatus.inactive
        elif args.status == 'active' and repo.status == RepositoryStatus.inactive:
            repo.status = RepositoryStatus.active
            needs_import = True

        db.session.add(repo)

        for name in OPTION_DEFAULTS.keys():
            value = args[name]
            if value is None:
                continue

            # special case phabricator.callsign since we can't enforce a unique
            # constraint
            if name == 'phabricator.callsign':
                existing = ItemOption.query.filter(
                    ItemOption.item_id != repo.id,
                    ItemOption.name == name,
                    ItemOption.value == value,
                ).first()
                if existing:
                    return '{"error": "A repository already exists with the given Phabricator callsign"}', 400

            create_or_update(ItemOption,
                             where={
                                 'item_id': repo.id,
                                 'name': name,
                             },
                             values={
                                 'value': value,
                             })

        db.session.commit()

        if needs_import:
            import_repo.delay_if_needed(
                repo_id=repo.id.hex,
                task_id=repo.id.hex,
            )

        context = self.serialize(repo)
        context['options'] = self._get_options(repo)

        return self.respond(context, serialize=False)
Example #12
    def post(self, step_id):
        step = Step.query.get(step_id)
        if step is None:
            return {"message": "step not found"}, 404

        args = self.parser.parse_args()

        if args.implementation is not None:
            step.implementation = args.implementation

        if args.data is not None:
            data = json.loads(args.data)
            if not isinstance(data, dict):
                return {"message": "data must be a JSON mapping"}, 400

            impl_cls = step.get_implementation(load=False)
            if impl_cls is None:
                return {
                    "message": "unable to load build step implementation"
                }, 400

            try:
                impl_cls(**data)
            except Exception:
                return {
                    "message":
                    "unable to create build step mapping provided data"
                }, 400
            step.data = data

        if args.order is not None:
            step.order = args.order

        step.date_modified = datetime.utcnow()
        db.session.add(step)

        plan = step.plan
        plan.date_modified = step.date_modified
        db.session.add(plan)

        for name in STEP_OPTIONS.keys():
            value = args.get(name)
            if value is None:
                continue

            create_or_update(ItemOption,
                             where={
                                 'item_id': step.id,
                                 'name': name,
                             },
                             values={
                                 'value': value,
                             })

        db.session.commit()

        return self.serialize(step), 200
Example #13
def _record_test_duration(step):
    create_or_update(ItemStat, where={
        'item_id': step.id,
        'name': 'test_duration',
    }, values={
        'value': db.session.query(func.coalesce(func.sum(TestCase.duration), 0)).filter(
            TestCase.step_id == step.id,
        ).as_scalar(),
    })
Example #14
    def post(self, plan_id):
        plan = Plan.query.get(plan_id)
        if plan is None:
            return error("plan not found", http_code=404)

        args = self.parser.parse_args()

        step = Step(
            plan=plan,
            order=args.order,
            implementation=args.implementation,
        )

        try:
            data = json.loads(args.data)
        except ValueError as e:
            db.session.rollback()
            return error("invalid JSON: %s" % e)
        if not isinstance(data, dict):
            db.session.rollback()
            return error("data must be a JSON mapping")

        impl_cls = step.get_implementation(load=False)
        if impl_cls is None:
            db.session.rollback()
            return error("unable to load build step implementation")

        try:
            # XXX(dcramer): It's important that we deepcopy data so any
            # mutations within the BuildStep don't propagate into the db
            impl_cls(**deepcopy(data))
        except Exception as exc:
            db.session.rollback()
            return error("unable to create build step provided data: %s" % exc)

        step.data = data
        step.order = args.order
        db.session.add(step)

        plan.date_modified = step.date_modified
        db.session.add(plan)

        for name in STEP_OPTIONS.keys():
            value = args.get(name)
            if value is None:
                continue

            create_or_update(ItemOption,
                             where={
                                 'item_id': step.id,
                                 'name': name,
                             },
                             values={
                                 'value': value,
                             })

        return self.serialize(step), 201
Example #15
def _record_test_duration(step):
    create_or_update(ItemStat, where={
        'item_id': step.id,
        'name': 'test_duration',
    }, values={
        'value': db.session.query(func.coalesce(func.sum(TestCase.duration), 0)).filter(
            TestCase.step_id == step.id,
        ).as_scalar(),
    })
Example #16
    def post(self, repository_id):
        repo = Repository.query.get(repository_id)
        if repo is None:
            return '', 404

        args = self.parser.parse_args()

        if args.url:
            repo.url = args.url
        if args.backend:
            repo.backend = RepositoryBackend[args.backend]

        needs_import = False
        if args.status == 'inactive':
            repo.status = RepositoryStatus.inactive
        elif args.status == 'active' and repo.status == RepositoryStatus.inactive:
            repo.status = RepositoryStatus.active
            needs_import = True

        db.session.add(repo)

        for name in OPTION_DEFAULTS.keys():
            value = args[name]
            if value is None:
                continue

            # special case phabricator.callsign since we can't enforce a unique
            # constraint
            if name == 'phabricator.callsign':
                existing = ItemOption.query.filter(
                    ItemOption.item_id != repo.id,
                    ItemOption.name == name,
                    ItemOption.value == value,
                ).first()
                if existing:
                    return '{"error": "A repository already exists with the given Phabricator callsign"}', 400

            create_or_update(ItemOption, where={
                'item_id': repo.id,
                'name': name,
            }, values={
                'value': value,
            })

        db.session.commit()

        if needs_import:
            import_repo.delay_if_needed(
                repo_id=repo.id.hex,
                task_id=repo.id.hex,
            )

        context = self.serialize(repo)
        context['options'] = self._get_options(repo)

        return self.respond(context, serialize=False)
Example #17
    def post(self, step_id):
        step = Step.query.get(step_id)
        if step is None:
            return error("step not found", http_code=404)

        args = self.parser.parse_args()

        if args.implementation is not None:
            step.implementation = args.implementation

        if args.data is not None:
            try:
                data = json.loads(args.data)
            except ValueError as e:
                return error("invalid JSON: %s" % e)

            if not isinstance(data, dict):
                return error("data must be a JSON mapping")

            impl_cls = step.get_implementation(load=False)
            if impl_cls is None:
                return error("unable to load build step implementation")

            try:
                # XXX(dcramer): It's important that we deepcopy data so any
                # mutations within the BuildStep don't propagate into the db
                impl_cls(**deepcopy(data))
            except Exception as exc:
                return error("unable to create build step mapping provided data: %s" % exc)
            step.data = data

        if args.order is not None:
            step.order = args.order

        step.date_modified = datetime.utcnow()
        db.session.add(step)

        plan = step.plan
        plan.date_modified = step.date_modified
        db.session.add(plan)

        for name in STEP_OPTIONS.keys():
            value = args.get(name)
            if value is None:
                continue

            create_or_update(ItemOption, where={
                'item_id': step.id,
                'name': name,
            }, values={
                'value': value,
            })

        db.session.commit()

        return self.serialize(step), 200
Example #18
 def _record_test_counts(self, test_list):
     create_or_update(ItemStat, where={
         'item_id': self.step.id,
         'name': 'test_count',
     }, values={
         'value': db.session.query(func.count(TestCase.id)).filter(
             TestCase.step_id == self.step.id,
         ).as_scalar(),
     })
     db.session.commit()
Example #19
def _record_test_rerun_counts(step):
    create_or_update(ItemStat, where={
        'item_id': step.id,
        'name': 'test_rerun_count',
    }, values={
        'value': db.session.query(func.count(TestCase.id)).filter(
            TestCase.step_id == step.id,
            TestCase.reruns > 0,
        ).as_scalar(),
    })
Example #20
def _record_test_rerun_counts(step):
    create_or_update(ItemStat, where={
        'item_id': step.id,
        'name': 'test_rerun_count',
    }, values={
        'value': db.session.query(func.count(TestCase.id)).filter(
            TestCase.step_id == step.id,
            TestCase.reruns > 0,
        ).as_scalar(),
    })
Example #21
def _record_test_failures(step):
    create_or_update(ItemStat, where={
        'item_id': step.id,
        'name': 'test_failures',
    }, values={
        'value': db.session.query(func.count(TestCase.id)).filter(
            TestCase.step_id == step.id,
            TestCase.result == Result.failed,
        ).as_scalar(),
    })
    db.session.commit()
Example #22
def _record_test_failures(step):
    create_or_update(ItemStat, where={
        'item_id': step.id,
        'name': 'test_failures',
    }, values={
        'value': db.session.query(func.count(TestCase.id)).filter(
            TestCase.step_id == step.id,
            TestCase.result == Result.failed,
        ).as_scalar(),
    })
    db.session.commit()
Example #23
    def _record_test_duration(self, test_list):
        if test_list:
            local_test_duration = sum(t.duration or 0 for t in test_list)
        else:
            local_test_duration = 0

        create_or_update(ItemStat, where={
            'item_id': self.step.id,
            'name': 'test_duration',
        }, values={
            'value': local_test_duration,
        })
Example #24
 def _record_test_counts(self, test_list):
     create_or_update(
         ItemStat,
         where={
             'item_id': self.step.id,
             'name': 'test_count',
         },
         values={
             'value': db.session.query(func.count(TestCase.id)).filter(
                 TestCase.step_id == self.step.id,
             ).as_scalar(),
         })
     db.session.commit()
Example #25
    def post(self, step_id):
        step = Step.query.get(step_id)
        if step is None:
            return {"message": "step not found"}, 404

        args = self.parser.parse_args()

        if args.implementation is not None:
            step.implementation = args.implementation

        if args.data is not None:
            data = json.loads(args.data)
            if not isinstance(data, dict):
                return {"message": "data must be a JSON mapping"}, 400

            impl_cls = step.get_implementation(load=False)
            if impl_cls is None:
                return {"message": "unable to load build step implementation"}, 400

            try:
                impl_cls(**data)
            except Exception:
                return {"message": "unable to create build step mapping provided data"}, 400
            step.data = data

        if args.order is not None:
            step.order = args.order

        step.date_modified = datetime.utcnow()
        db.session.add(step)

        plan = step.plan
        plan.date_modified = step.date_modified
        db.session.add(plan)

        for name in STEP_OPTIONS.keys():
            value = args.get(name)
            if value is None:
                continue

            create_or_update(ItemOption, where={
                'item_id': step.id,
                'name': name,
            }, values={
                'value': value,
            })

        db.session.commit()

        return self.serialize(step), 200
Example #26
    def post(self):
        args = self.post_parser.parse_args()

        for name, value in args.iteritems():
            if value is None:
                continue

            create_or_update(SystemOption, where={
                'name': name,
            }, values={
                'value': value,
            })

        return '', 200
Example #27
    def post(self, plan_id):
        plan = Plan.query.get(plan_id)
        if plan is None:
            return {"message": "plan not found"}, 404

        args = self.parser.parse_args()

        step = Step(
            plan=plan,
            order=args.order,
            implementation=args.implementation,
        )

        data = json.loads(args.data)
        if not isinstance(data, dict):
            return {"message": "data must be a JSON mapping"}, 400

        impl_cls = step.get_implementation(load=False)
        if impl_cls is None:
            return {"message": "unable to load build step implementation"}, 400

        try:
            # XXX(dcramer): It's important that we deepcopy data so any
            # mutations within the BuildStep don't propagate into the db
            impl_cls(**deepcopy(data))
        except Exception:
            return {"message": "unable to create build step provided data"}, 400

        step.data = data
        step.order = args.order
        db.session.add(step)

        plan.date_modified = step.date_modified
        db.session.add(plan)

        for name in STEP_OPTIONS.keys():
            value = args.get(name)
            if value is None:
                continue

            create_or_update(ItemOption, where={
                'item_id': step.id,
                'name': name,
            }, values={
                'value': value,
            })

        return self.serialize(step), 201
Example #28
    def save(self, repository):
        author = self._get_author(self.author)
        if self.author == self.committer:
            committer = author
        else:
            committer = self._get_author(self.committer)

        revision, created = create_or_update(Revision, where={
            'repository': repository,
            'sha': self.id,
        }, values={
            'author': author,
            'committer': committer,
            'message': self.message,
            'parents': self.parents,
            'branches': self.branches,
            'date_created': self.author_date,
            'date_committed': self.committer_date,
        })

        # we also want to create a source for this item as it's the canonical
        # representation in the UI
        try_create(Source, {
            'revision_sha': self.id,
            'repository': repository,
        })

        return (revision, created)
Example #29
    def save(self, repository):
        author = self._get_author(self.author)
        if self.author == self.committer:
            committer = author
        else:
            committer = self._get_author(self.committer)

        revision, created = create_or_update(Revision, where={
            'repository': repository,
            'sha': self.id,
        }, values={
            'author': author,
            'committer': committer,
            'message': self.message,
            'parents': self.parents,
            'branches': self.branches,
            'date_created': self.author_date,
            'date_committed': self.committer_date,
        })

        # This call is relatively expensive - only do if necessary.
        if created:
            vcs = repository.get_vcs()
            if vcs:
                revision.patch_hash = vcs.get_patch_hash(self.id)

        # we also want to create a source for this item as it's the canonical
        # representation in the UI
        source = try_create(Source, {
            'revision_sha': self.id,
            'repository': repository,
        })

        return (revision, created, source)
Example #30
    def _sync_artifact_as_log(self, artifact):
        jobstep = artifact.step
        job = artifact.job

        logsource, created = get_or_create(LogSource,
                                           where={
                                               'name': artifact.data['displayPath'],
                                               'job': job,
                                               'step': jobstep,
                                           },
                                           defaults={
                                               'job': job,
                                               'project': job.project,
                                               'date_created': job.date_started,
                                           })

        offset = 0
        with closing(self.fetch_artifact(jobstep, artifact.data)) as resp:
            iterator = resp.iter_content()
            for chunk in chunked(iterator, LOG_CHUNK_SIZE):
                chunk_size = len(chunk)
                chunk, _ = create_or_update(LogChunk,
                                            where={
                                                'source': logsource,
                                                'offset': offset,
                                            },
                                            values={
                                                'job': job,
                                                'project': job.project,
                                                'size': chunk_size,
                                                'text': chunk,
                                            })
                offset += chunk_size
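Besides create_or_update, the examples also lean on get_or_create and try_create. The following is only a rough sketch of how these two are assumed to differ (the real Changes implementations may handle integrity errors and concurrent writers differently): get_or_create returns an existing row untouched or inserts one from `where` plus `defaults`, while try_create only inserts, returning None when a matching row already exists.

from changes.config import db  # assumed import path, as in the earlier sketch

def get_or_create(model, where, defaults=None):
    # Return (instance, created); never modifies an existing row.
    instance = model.query.filter_by(**where).first()
    if instance is not None:
        return instance, False
    instance = model(**dict(where, **(defaults or {})))
    db.session.add(instance)
    return instance, True

def try_create(model, where, defaults=None):
    # Insert a new row, or return None if one already matches `where`.
    if model.query.filter_by(**where).first() is not None:
        return None
    instance = model(**dict(where, **(defaults or {})))
    db.session.add(instance)
    return instance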
Example #31
    def save(self, repository):
        author = self._get_author(self.author)
        if self.author == self.committer:
            committer = author
        else:
            committer = self._get_author(self.committer)

        revision, created = create_or_update(Revision,
                                             where={
                                                 'repository': repository,
                                                 'sha': self.id,
                                             },
                                             values={
                                                 'author': author,
                                                 'committer': committer,
                                                 'message': self.message,
                                                 'parents': self.parents,
                                                 'branches': self.branches,
                                                 'date_created': self.author_date,
                                                 'date_committed': self.committer_date,
                                             })

        return (revision, created)
Example #32
    def _sync_artifact_as_log(self, artifact):
        jobstep = artifact.step
        job = artifact.job

        logsource, created = get_or_create(LogSource, where={
            'name': artifact.data['displayPath'],
            'job': job,
            'step': jobstep,
        }, defaults={
            'job': job,
            'project': job.project,
            'date_created': job.date_started,
        })

        offset = 0
        with closing(self.fetch_artifact(jobstep, artifact.data)) as resp:
            iterator = resp.iter_content()
            for chunk in chunked(iterator, LOG_CHUNK_SIZE):
                chunk_size = len(chunk)
                chunk, _ = create_or_update(LogChunk, where={
                    'source': logsource,
                    'offset': offset,
                }, values={
                    'job': job,
                    'project': job.project,
                    'size': chunk_size,
                    'text': chunk,
                })
                offset += chunk_size
Example #33
    def post(self):
        args = self.post_parser.parse_args()

        for name, value in args.iteritems():
            if value is None:
                continue

            create_or_update(SystemOption,
                             where={
                                 'name': name,
                             },
                             values={
                                 'value': value,
                             })

        return '', 200
Example #34
    def save(self, repository):
        author = self._get_author(self.author)
        if self.author == self.committer:
            committer = author
        else:
            committer = self._get_author(self.committer)

        revision, created = create_or_update(Revision, where={
            'repository': repository,
            'sha': self.id,
        }, values={
            'author': author,
            'committer': committer,
            'message': self.message,
            'parents': self.parents,
            'branches': self.branches,
            'date_created': self.author_date,
            'date_committed': self.committer_date,
        })

        # we also want to create a source for this item as it's the canonical
        # representation in the UI
        source = try_create(Source, {
            'revision_sha': self.id,
            'repository': repository,
        })

        return (revision, created, source)
Example #35
    def _sync_artifact_as_log(self, jobstep, job_name, build_no, artifact):
        job = jobstep.job
        logsource, created = get_or_create(LogSource, where={
            'name': artifact['displayPath'],
            'job': job,
            'step': jobstep,
        }, defaults={
            'project': job.project,
            'date_created': job.date_started,
        })

        url = '{base}/job/{job}/{build}/artifact/{artifact}'.format(
            base=self.base_url, job=job_name,
            build=build_no, artifact=artifact['relativePath'],
        )

        offset = 0
        resp = requests.get(url, stream=True, timeout=15)
        iterator = resp.iter_content()
        for chunk in chunked(iterator, LOG_CHUNK_SIZE):
            chunk_size = len(chunk)
            chunk, _ = create_or_update(LogChunk, where={
                'source': logsource,
                'offset': offset,
            }, values={
                'job': job,
                'project': job.project,
                'size': chunk_size,
                'text': chunk,
            })
            offset += chunk_size

            publish_logchunk_update(chunk)
Example #36
    def _record_test_rerun_counts(self, test_list):
        job = self.step.job

        rerun_count = db.session.query(func.count(TestCase.id)).filter(
            TestCase.job_id == job.id,
            TestCase.reruns > 0,
        ).as_scalar()

        create_or_update(ItemStat,
                         where={
                             'item_id': self.step.id,
                             'name': 'test_rerun_count',
                         },
                         values={
                             'value': sum(1 for t in test_list if t.reruns),
                         })

        create_or_update(ItemStat,
                         where={
                             'item_id': job.id,
                             'name': 'test_rerun_count',
                         },
                         values={
                             'value': rerun_count,
                         })

        instance = try_create(ItemStat,
                              where={
                                  'item_id': job.build_id,
                                  'name': 'test_rerun_count',
                              },
                              defaults={'value': rerun_count})
        if not instance:
            ItemStat.query.filter(
                ItemStat.item_id == job.build_id,
                ItemStat.name == 'test_rerun_count',
            ).update({
                'value': select([func.sum(ItemStat.value)]).where(
                    and_(
                        ItemStat.name == 'test_rerun_count',
                        ItemStat.item_id.in_(select([Job.id]).where(
                            Job.build_id == job.build_id,
                        ))
                    )
                ),
            }, synchronize_session=False)
Example #37
    def _record_test_duration(self, test_list):
        job = self.step.job

        test_duration = db.session.query(func.sum(TestCase.duration)).filter(
            TestCase.job_id == job.id,
        ).as_scalar()

        create_or_update(ItemStat,
                         where={
                             'item_id': self.step.id,
                             'name': 'test_duration',
                         },
                         values={
                             'value': sum(t.duration for t in test_list),
                         })

        create_or_update(ItemStat,
                         where={
                             'item_id': job.id,
                             'name': 'test_duration',
                         },
                         values={
                             'value': test_duration,
                         })

        instance = try_create(ItemStat,
                              where={
                                  'item_id': job.build_id,
                                  'name': 'test_duration',
                              },
                              defaults={'value': test_duration})
        if not instance:
            ItemStat.query.filter(
                ItemStat.item_id == job.build_id,
                ItemStat.name == 'test_duration',
            ).update({
                'value': select([func.sum(ItemStat.value)]).where(
                    and_(
                        ItemStat.name == 'test_duration',
                        ItemStat.item_id.in_(select([Job.id]).where(
                            Job.build_id == job.build_id,
                        ))
                    )
                ),
            }, synchronize_session=False)
Example #38
    def _sync_log(self, jobstep, name, job_name, build_no):
        job = jobstep.job
        logsource, created = get_or_create(LogSource, where={
            'name': name,
            'step': jobstep,
        }, defaults={
            'job': job,
            'project': jobstep.project,
            'date_created': jobstep.date_started,
        })
        if created:
            offset = 0
        else:
            offset = jobstep.data.get('log_offset', 0)

        url = '{base}/job/{job}/{build}/logText/progressiveText/'.format(
            base=jobstep.data['master'],
            job=job_name,
            build=build_no,
        )

        session = self.http_session
        with closing(session.get(url, params={'start': offset}, stream=True, timeout=15)) as resp:
            log_length = int(resp.headers['X-Text-Size'])

            # When you request an offset that doesn't exist in the build log, Jenkins
            # will instead return the entire log. Jenkins also seems to provide us
            # with X-Text-Size which indicates the total size of the log
            if offset > log_length:
                return

            # XXX: requests doesn't seem to guarantee chunk_size, so we force it
            # with our own helper
            iterator = resp.iter_content()
            for chunk in chunked(iterator, LOG_CHUNK_SIZE):
                chunk_size = len(chunk)
                chunk, _ = create_or_update(LogChunk, where={
                    'source': logsource,
                    'offset': offset,
                }, values={
                    'job': job,
                    'project': job.project,
                    'size': chunk_size,
                    'text': chunk,
                })
                offset += chunk_size

            # Jenkins will suggest to us that there is more data when the job has
            # yet to complete
            has_more = resp.headers.get('X-More-Data') == 'true'

        # We **must** track the log offset externally as Jenkins embeds encoded
        # links and we can't accurately predict the next `start` param.
        jobstep.data['log_offset'] = log_length
        db.session.add(jobstep)

        return True if has_more else None
Example #39
    def _sync_log(self, jobstep, name, job_name, build_no):
        job = jobstep.job
        # TODO(dcramer): this doesnt handle concurrency
        logsource, created = get_or_create(LogSource, where={
            'name': name,
            'job': job,
        }, defaults={
            'step': jobstep,
            'project': jobstep.project,
            'date_created': jobstep.date_started,
        })
        if created:
            offset = 0
        else:
            offset = jobstep.data.get('log_offset', 0)

        url = '{base}/job/{job}/{build}/logText/progressiveHtml/'.format(
            base=self.base_url,
            job=job_name,
            build=build_no,
        )

        resp = requests.get(
            url, params={'start': offset}, stream=True, timeout=15)
        log_length = int(resp.headers['X-Text-Size'])
        # When you request an offset that doesn't exist in the build log, Jenkins
        # will instead return the entire log. Jenkins also seems to provide us
        # with X-Text-Size which indicates the total size of the log
        if offset > log_length:
            return

        iterator = resp.iter_content()
        # XXX: requests doesn't seem to guarantee chunk_size, so we force it
        # with our own helper
        for chunk in chunked(iterator, LOG_CHUNK_SIZE):
            chunk_size = len(chunk)
            chunk, _ = create_or_update(LogChunk, where={
                'source': logsource,
                'offset': offset,
            }, values={
                'job': job,
                'project': job.project,
                'size': chunk_size,
                'text': chunk,
            })
            offset += chunk_size

            publish_logchunk_update(chunk)

        # We **must** track the log offset externally as Jenkins embeds encoded
        # links and we can't accurately predict the next `start` param.
        jobstep.data['log_offset'] = log_length
        db.session.add(jobstep)

        # Jenkins will suggest to us that there is more data when the job has
        # yet to complete
        return True if resp.headers.get('X-More-Data') == 'true' else None
Example #40
    def post(self, project_id):
        project = self._get_project(project_id)
        if project is None:
            return '', 404

        args = self.parser.parse_args()

        for name, value in args.iteritems():
            if value is None:
                continue
            create_or_update(ProjectOption, where={
                'project': project,
                'name': name,
            }, values={
                'value': value,
            })

        return '', 200
Example #41
    def post(self):
        user = get_current_user()
        if user is None:
            return error("User not found", http_code=404)

        args = self.post_parser.parse_args()

        for name, value in args.iteritems():
            if value is None:
                continue

            create_or_update(ItemOption, where={
                'item_id': user.id,
                'name': name,
            }, values={
                'value': value,
            })

        return self.respond({})
Example #42
    def post(self, plan_id):
        plan = Plan.query.get(plan_id)
        if plan is None:
            return error("Plan not found", http_code=404)

        args = self.post_parser.parse_args()

        for name, value in args.iteritems():
            if value is None:
                continue

            create_or_update(ItemOption, where={
                'item_id': plan.id,
                'name': name,
            }, values={
                'value': value,
            })

        return self.respond({})
Example #43
    def post(self, plan_id):
        plan = Plan.query.get(plan_id)
        if plan is None:
            return '', 404

        args = self.post_parser.parse_args()

        for name, value in args.iteritems():
            if value is None:
                continue

            create_or_update(ItemOption, where={
                'item_id': plan.id,
                'name': name,
            }, values={
                'value': value,
            })

        return '', 200
Example #44
    def post(self):
        user = get_current_user()
        if user is None:
            return '', 404

        args = self.post_parser.parse_args()

        for name, value in args.iteritems():
            if value is None:
                continue

            create_or_update(ItemOption, where={
                'item_id': user.id,
                'name': name,
            }, values={
                'value': value,
            })

        return '', 200
Example #45
    def post(self, plan_id):
        plan = Plan.query.get(plan_id)
        if plan is None:
            return '', 404

        args = self.post_parser.parse_args()

        for name, value in args.iteritems():
            if value is None:
                continue

            create_or_update(ItemOption, where={
                'item_id': plan.id,
                'name': name,
            }, values={
                'value': value,
            })

        return '', 200
Example #46
    def _record_test_rerun_counts(self, test_list):
        job = self.step.job

        rerun_count = db.session.query(func.count(TestCase.id)).filter(
            TestCase.job_id == job.id,
            TestCase.reruns > 0,
        ).as_scalar()

        create_or_update(ItemStat, where={
            'item_id': self.step.id,
            'name': 'test_rerun_count',
        }, values={
            'value': sum(1 for t in test_list if t.reruns),
        })

        create_or_update(ItemStat, where={
            'item_id': job.id,
            'name': 'test_rerun_count',
        }, values={
            'value': rerun_count,
        })

        instance = try_create(ItemStat, where={
            'item_id': job.build_id,
            'name': 'test_rerun_count',
        }, defaults={
            'value': rerun_count
        })
        if not instance:
            ItemStat.query.filter(
                ItemStat.item_id == job.build_id,
                ItemStat.name == 'test_rerun_count',
            ).update({
                'value': select([func.sum(ItemStat.value)]).where(
                    and_(
                        ItemStat.name == 'test_rerun_count',
                        ItemStat.item_id.in_(select([Job.id]).where(
                            Job.build_id == job.build_id,
                        ))
                    )
                ),
            }, synchronize_session=False)
Example #47
    def _record_test_duration(self, test_list):
        job = self.step.job

        test_duration = db.session.query(func.sum(TestCase.duration)).filter(
            TestCase.job_id == job.id,
        ).as_scalar()

        create_or_update(ItemStat, where={
            'item_id': self.step.id,
            'name': 'test_duration',
        }, values={
            'value': sum(t.duration for t in test_list),
        })

        create_or_update(ItemStat, where={
            'item_id': job.id,
            'name': 'test_duration',
        }, values={
            'value': test_duration,
        })

        instance = try_create(ItemStat, where={
            'item_id': job.build_id,
            'name': 'test_duration',
        }, defaults={
            'value': test_duration
        })
        if not instance:
            ItemStat.query.filter(
                ItemStat.item_id == job.build_id,
                ItemStat.name == 'test_duration',
            ).update({
                'value': select([func.sum(ItemStat.value)]).where(
                    and_(
                        ItemStat.name == 'test_duration',
                        ItemStat.item_id.in_(select([Job.id]).where(
                            Job.build_id == job.build_id,
                        ))
                    )
                ),
            }, synchronize_session=False)
Example #48
    def post(self, plan_id):
        plan = Plan.query.get(plan_id)
        if plan is None:
            return error("Plan not found", http_code=404)

        args = self.post_parser.parse_args()

        for name, value in args.iteritems():
            if value is None:
                continue

            create_or_update(ItemOption,
                             where={
                                 'item_id': plan.id,
                                 'name': name,
                             },
                             values={
                                 'value': value,
                             })

        return self.respond({})
Example #49
    def post(self):
        user = get_current_user()
        if user is None:
            return error("User not found", http_code=404)

        args = self.post_parser.parse_args()

        for name, value in args.iteritems():
            if value is None:
                continue

            create_or_update(ItemOption,
                             where={
                                 'item_id': user.id,
                                 'name': name,
                             },
                             values={
                                 'value': value,
                             })

        return self.respond({})
Example #50
    def post(self, project_id):
        project = self._get_project(project_id)
        if project is None:
            return "", 404

        args = self.parser.parse_args()

        for name, value in args.iteritems():
            if value is None:
                continue

            # If we're rolling back a snapshot, take note.
            if name == "snapshot.current":
                current = Snapshot.get_current(project.id)
                if current:
                    replacement = Snapshot.query.get(value)
                    if replacement.date_created < current.date_created:
                        _report_snapshot_downgrade(project)

            create_or_update(ProjectOption, where={"project": project, "name": name}, values={"value": value})

        return "", 200
Example #51
def _set_latest_green_build_for_each_branch(build, source, vcs):
    project = build.project
    for branch in source.revision.branches:
        current_latest_green_build = LatestGreenBuild.query.options(
            joinedload('build').joinedload('source')).filter(
                LatestGreenBuild.project_id == project.id,
                LatestGreenBuild.branch == branch).first()

        if current_latest_green_build:
            child_in_question = source.revision_sha
            parent_in_question = current_latest_green_build.build.source.revision_sha
            try:
                if not vcs.is_child_parent(
                        child_in_question=child_in_question,
                        parent_in_question=parent_in_question):
                    return
            except UnknownChildRevision:
                # The child_in_question is an unknown SHA. This shouldn't happen.
                logging.exception(
                    "Child SHA is missing from the VCS. This is bad news and "
                    "shouldn't happen. (parent=%s, child=%s)",
                    parent_in_question, child_in_question)
                return
            except UnknownParentRevision:
                # The parent_in_question is an unknown SHA. Assume it was
                # deleted and forgotten by the VCS, and set the new green build to
                # the child_in_question anyway.
                logging.warning(
                    "Parent SHA is missing from the VCS. Assume it was deleted "
                    "and the new build is legit.",
                    extra={
                        'data': {
                            'repository_url': source.revision.repository.url,
                            'branch': branch,
                            'parent': parent_in_question,
                            'child': child_in_question,
                        },
                    })

        # switch latest_green_build to this sha
        green_build, _ = create_or_update(LatestGreenBuild,
                                          where={
                                              'project_id': project.id,
                                              'branch': branch,
                                          },
                                          values={
                                              'build': build,
                                          })
Example #52
def _set_latest_green_build_for_each_branch(build, source, vcs):
    project = build.project
    for branch in source.revision.branches:
        current_latest_green_build = LatestGreenBuild.query.options(
            joinedload('build').joinedload('source')
        ).filter(
            LatestGreenBuild.project_id == project.id,
            LatestGreenBuild.branch == branch).first()

        if current_latest_green_build:
            child_in_question = source.revision_sha
            parent_in_question = current_latest_green_build.build.source.revision_sha
            try:
                if not vcs.is_child_parent(
                     child_in_question=child_in_question,
                     parent_in_question=parent_in_question):
                    return
            except UnknownChildRevision:
                # The child_in_question is an unknown SHA. This shouldn't happen.
                logging.exception(
                    "Child SHA is missing from the VCS. This is bad news and "
                    "shouldn't happen. (parent=%s, child=%s)",
                    parent_in_question, child_in_question)
                return
            except UnknownParentRevision:
                # The parent_in_question is an unknown SHA. Assume it was
                # deleted and forgotten by the VCS, and set the new green build to
                # the child_in_question anyway.
                logging.warning(
                    "Parent SHA is missing from the VCS. Assume it was deleted "
                    "and the new build is legit.",
                    extra={
                        'data': {
                            'repository_url': source.revision.repository.url,
                            'branch': branch,
                            'parent': parent_in_question,
                            'child': child_in_question,
                        },
                    }
                )

        # switch latest_green_build to this sha
        green_build, _ = create_or_update(LatestGreenBuild, where={
            'project_id': project.id,
            'branch': branch,
        }, values={
            'build': build,
        })
Example #53
def _set_latest_green_build_for_each_branch(build, source, vcs):
    project = build.project
    for branch in source.revision.branches:
        current_latest_green_build = LatestGreenBuild.query.options(
            joinedload('build').joinedload('source')
        ).filter(
            LatestGreenBuild.project_id == project.id,
            LatestGreenBuild.branch == branch).first()

        if not current_latest_green_build or vcs.is_child_parent(
                child_in_question=source.revision_sha,
                parent_in_question=current_latest_green_build.build.source.revision_sha):
            # switch latest_green_build to this sha
            green_build, _ = create_or_update(LatestGreenBuild, where={
                'project_id': project.id,
                'branch': branch,
            }, values={
                'build': build,
            })
Example #54
    def save(self, repository):
        author = self._get_author(self.author)
        if self.author == self.committer:
            committer = author
        else:
            committer = self._get_author(self.committer)

        revision, created = create_or_update(Revision,
                                             where={
                                                 'repository': repository,
                                                 'sha': self.id,
                                             },
                                             values={
                                                 'author': author,
                                                 'committer': committer,
                                                 'message': self.message,
                                                 'parents': self.parents,
                                                 'branches': self.branches,
                                                 'date_created': self.author_date,
                                                 'date_committed': self.committer_date,
                                             })

        # This call is relatively expensive - only do if necessary.
        if created:
            vcs = repository.get_vcs()
            if vcs:
                revision.patch_hash = vcs.get_patch_hash(self.id)

        # we also want to create a source for this item as it's the canonical
        # representation in the UI
        source = try_create(Source, {
            'revision_sha': self.id,
            'repository': repository,
        })

        return (revision, created, source)
Example #55
    def _sync_artifact_as_log(self, artifact):
        jobstep = artifact.step
        job = artifact.job

        logsource, created = get_or_create(LogSource,
                                           where={
                                               'name': artifact.data['displayPath'],
                                               'job': job,
                                               'step': jobstep,
                                           },
                                           defaults={
                                               'job': job,
                                               'project': job.project,
                                               'date_created': job.date_started,
                                           })

        url = '{base}/job/{job}/{build}/artifact/{artifact}'.format(
            base=self.base_url,
            job=jobstep.data['job_name'],
            build=jobstep.data['build_no'],
            artifact=artifact.data['relativePath'],
        )

        offset = 0
        session = requests.Session()
        with closing(session.get(url, stream=True, timeout=15)) as resp:
            iterator = resp.iter_content()
            for chunk in chunked(iterator, LOG_CHUNK_SIZE):
                chunk_size = len(chunk)
                chunk, _ = create_or_update(LogChunk,
                                            where={
                                                'source': logsource,
                                                'offset': offset,
                                            },
                                            values={
                                                'job': job,
                                                'project': job.project,
                                                'size': chunk_size,
                                                'text': chunk,
                                            })
                offset += chunk_size
Example #56
def _set_latest_green_build_for_each_branch(build, source, vcs):
    project = build.project
    for branch in source.revision.branches:
        current_latest_green_build = LatestGreenBuild.query.options(
            joinedload('build').joinedload('source')
        ).filter(
            LatestGreenBuild.project_id == project.id,
            LatestGreenBuild.branch == branch).first()

        if not current_latest_green_build or vcs.is_child_parent(
                child_in_question=source.revision_sha,
                parent_in_question=current_latest_green_build.build.source.revision_sha):
            # switch latest_green_build to this sha
            green_build, _ = create_or_update(LatestGreenBuild, where={
                'project_id': project.id,
                'branch': branch,
            }, values={
                'build': build,
            })
Example #57
    def post(self):
        """ HTTP POST to create or update AdminMessages.

        This API enforces that we only ever have at most one message. This will
        likely change in the future. To clear the current message, post an empty
        message. Messages cannot be deleted once created.

        Returns:
            str: JSON representation of the AdminMessage that was edited.
        """
        args = self.post_parser.parse_args()

        # Enforce we only ever have a single message
        message, _ = create_or_update(AdminMessage, where={}, values={
            'message': args.message,
            'user_id': get_current_user().id,
            'date_created': datetime.utcnow()
        })

        # Response isn't required, but we give one to make testing easier
        return self.respond(message)
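Note that the empty where clause is what makes the message a singleton: with no filter conditions, create_or_update matches whichever AdminMessage row already exists and updates it in place, only inserting on the very first post. A hypothetical illustration of that behaviour, not part of the endpoint above:

# Hypothetical usage for illustration only.
first, created = create_or_update(AdminMessage, where={}, values={'message': 'maintenance at noon'})
assert created                                  # no message existed yet, so one was inserted
second, created = create_or_update(AdminMessage, where={}, values={'message': ''})
assert not created and second.id == first.id    # same row, now cleared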
Example #58
def build_finished_handler(build_id, **kwargs):
    build = Build.query.get(build_id)
    if build is None:
        return

    if build.cause == Cause.snapshot:
        return

    if build.result != Result.passed:
        return

    url = current_app.config.get('GREEN_BUILD_URL')
    if not url:
        logger.info('GREEN_BUILD_URL not set')
        return

    auth = current_app.config['GREEN_BUILD_AUTH']
    if not auth:
        logger.info('GREEN_BUILD_AUTH not set')
        return

    source = build.source

    # we only want to identify stable revisions
    if not source.is_commit():
        logger.debug('Ignoring build due to non-commit: %s', build.id)
        return

    options = get_options(build.project_id)

    if options.get('green-build.notify', '1') != '1':
        logger.info('green-build.notify disabled for project: %s', build.project_id)
        return

    vcs = source.repository.get_vcs()
    if vcs is None:
        logger.info('Repository has no VCS set: %s', source.repository.id)
        return

    # ensure we have the latest changes
    if vcs.exists():
        vcs.update()
    else:
        vcs.clone()

    release_id = get_release_id(source, vcs)

    project = options.get('green-build.project') or build.project.slug

    logging.info('Making green_build request to %s', url)
    try:
        requests.post(url, auth=auth, data={
            'project': project,
            'id': release_id,
            'build_url': build_uri('/projects/{0}/builds/{1}/'.format(
                build.project.slug, build.id.hex)),
            'build_server': 'changes',
        })
    except Exception:
        logger.exception('Failed to report green build')
        status = 'fail'
    else:
        status = 'success'

    create_or_update(Event, where={
        'type': EventType.green_build,
        'item_id': build.id,
    }, values={
        'data': {
            'status': status,
        },
        'date_modified': datetime.utcnow(),
    })

    # set latest_green_build if latest for each branch:
    _set_latest_green_build_for_each_branch(build, source, vcs)