def insert_articles(articles):
    """Bulk-insert the given Article objects; no-op for an empty collection.

    Args:
        articles: Iterable of ORM article objects to persist.
    """
    # Original used `len(articles) is 0`: identity comparison against an int
    # is implementation-defined (CPython small-int caching) and flagged by
    # linters. An emptiness check is the idiomatic, safe equivalent.
    if not articles:
        return
    session = db.Session()
    session.bulk_save_objects(articles)
    session.commit()
def get_latest(cls) -> Dict[Text, MetricImplementation]:
    """Fetch the latest result for each metric.

    Returns:
        Mapping of metric names to the latest result for each.
    """
    logging.debug('Fetching latest metric results')
    # Work against the raw Table so columns can be referenced in both the
    # subquery and the join condition below.
    metric_results = models.MetricResult.__table__
    session = db.Session()
    # Only metrics currently registered on the class are considered.
    active_metrics_names = cls._active_metrics.keys()
    # Subquery: for each active metric name, the timestamp of its most
    # recent computation.
    max_dates_query = session.query(
        metric_results.c.name,
        sqlalchemy.func.max(
            metric_results.c.computed_at).label('max_computed_at')
    ).group_by(metric_results.c.name).filter(
        metric_results.c.name.in_(active_metrics_names)).subquery('latest')
    # Join full rows back against the (name, max_computed_at) pairs to
    # recover the complete latest MetricResult per metric.
    latest_results_query = session.query(models.MetricResult).join(
        max_dates_query,
        sqlalchemy.and_(
            metric_results.c.name == max_dates_query.c.name,
            metric_results.c.computed_at == max_dates_query.c.max_computed_at))
    # NOTE(review): `_MetricImplementation__from_result` name-mangled helper;
    # presumably converts a DB row into a MetricImplementation — confirm.
    results = {
        result.name: cls.__from_result(result)
        for result in latest_results_query
    }
    session.close()
    return results
def add_to_blacklist(owner_id, pair_id):
    """Persist a blacklist entry linking `owner_id` to `pair_id`."""
    entry = db.Blacklist(owner_id=owner_id, pair_id=pair_id)
    db_session = db.Session()
    db_session.add(entry)
    db_session.commit()
def update():
    """Copy old-schema ballot reservations into the new schema as occupancies.

    NOTE(review): relies on module-level `new_ballot_id` / `old_ballot_id`
    globals not visible here — confirm they are set before calling.
    """
    os = olddb.Session()   # old database
    ns = db.Session()      # new database
    ballot = ns.query(m.BallotSeason).get(new_ballot_id)
    reserves = os.query(OldReservation).filter(
        OldReservation.ballot == old_ballot_id)
    for r in reserves:
        # Skip reservations whose room/user/listing is missing in the new
        # DB; each failure mode gets its own "E*" diagnostic prefix.
        room = ns.query(m.Room).get(r.room)
        if not room:
            print("ER: room {} does not exist".format(r.room))
            continue
        user = ns.query(m.Person).get(r.person)
        if not user:
            print("EU: user {} does not exist".format(r.person))
            continue
        listing = room.listing_for.get(ballot)
        if not listing:
            print("EL: no listing for room {} ({})".format(
                r.room, room.pretty_name()))
            continue
        # Only add an occupancy if the user does not already occupy this
        # listing (idempotent re-runs).
        if not any(user == o.resident for o in listing.occupancies):
            listing.occupancies.append(
                m.Occupancy(resident=user, chosen_at=r.ts_chosen))
            # "B:" marks a booking that was migrated.
            print("B: {:6s} -> {:3d} ({})".format(
                user.crsid, room.id, room.pretty_name()))
    ns.commit()
    ns.close()
    os.close()
def add_to_whitelist(owner_id, pair_id, photos, full_name, url):
    """Persist a whitelist entry with the pair's profile details."""
    entry = db.Whitelist(owner_id=owner_id,
                         pair_id=pair_id,
                         photos=photos,
                         full_name=full_name,
                         url=url)
    db_session = db.Session()
    db_session.add(entry)
    db_session.commit()
def drop_whitelist(user_id):
    """Delete every whitelist row owned by `user_id`."""
    db_session = db.Session()
    owned_rows = db_session.query(db.Whitelist).filter(
        db.Whitelist.owner_id == user_id)
    # ORM-level delete per row (keeps cascade behavior), then one commit.
    for row in owned_rows:
        db_session.delete(row)
    db_session.commit()
def delete_user(user_id):
    """Delete every Vkuser row whose vk_id equals `user_id`."""
    db_session = db.Session()
    matching = db_session.query(db.Vkuser).filter(
        db.Vkuser.vk_id == user_id)
    for row in matching:
        db_session.delete(row)
    db_session.commit()
def __init__(self):
    """Create the GitHub client and DB session, and pre-load seen hashes."""
    self.github = github.GitHubGraphQL()
    self.session = db.Session()
    # Union of commit and cherry-pick hashes already stored, so previously
    # ingested commits can be skipped.
    hash_rows = self.session.query(models.Commit.hash).union(
        self.session.query(models.Cherrypick.hash)).all()
    self.seen_commit_hashes = {row.hash for row in hash_rows}
def _compute_value(self) -> float:
    """Computes the average number of commits per release over the last
    90 days.

    Considering only production releases, we can just count all commits
    committed between the first and last release of the 90-day window,
    and divide by the number of releases (excluding the last one).

    Raises:
        ValueError: if less than two releases exist.

    Returns:
        The average number of commits per release.
    """
    logging.info('Counting commits per release')
    session = db.Session()
    window_releases = session.query(models.Release).filter(
        models.Release.is_last_90_days(base_time=self.base_time)).order_by(
            models.Release.published_at.desc()).all()
    if len(window_releases) < 2:
        raise ValueError(
            'Not enough releases to determine a range of commits.')
    # Sorted newest-first: index 0 is the latest release in the window.
    newest_date = window_releases[0].published_at
    oldest_date = window_releases[-1].published_at
    commit_total = session.query(models.Commit).filter(
        models.Commit.committed_at.between(oldest_date,
                                           newest_date)).count()
    session.close()
    # Commits belonging to the last release are not included, hence the
    # denominator is one less than the release count.
    return commit_total / (len(window_releases) - 1)
def remove_from_whitelist(id):
    """Move pair `id` from the whitelist to the blacklist, if present."""
    db_session = db.Session()
    try:
        entry = db_session.query(db.Whitelist).filter(
            db.Whitelist.pair_id == id).one()
    except NoResultFound:
        # Nothing whitelisted under this pair id; silently do nothing.
        return
    add_to_blacklist(entry.owner_id, entry.pair_id)
    db_session.delete(entry)
    db_session.commit()
def recompute(self) -> None:
    """Computes the metric and records the result in the `metrics` table."""
    logging.info('Recomputing metric %s', self.name)
    fresh_value = self._compute_value()
    self.result = models.MetricResult(value=fresh_value, name=self.name)
    logging.info('Updating metric %s value to %.3g', self.name,
                 self.result.value)
    db_session = db.Session()
    db_session.add(self.result)
    db_session.commit()
def _get_metric_history(self) -> Sequence[models.MetricResult]:
    """Return this metric's results from the last `history_days` days,
    ordered oldest first."""
    cutoff = datetime.datetime.now() - datetime.timedelta(
        days=self.history_days)
    db_session = db.Session()
    rows = db_session.query(models.MetricResult).order_by(
        models.MetricResult.computed_at.asc()).filter(
            (models.MetricResult.name == self.metric.name)
            & (models.MetricResult.computed_at > cutoff)).all()
    db_session.close()
    return rows
def _compute_value(self) -> float:
    """Counts the number of cherry-picks in the last 90 days.

    Returns:
        The number of cherry-picks.
    """
    logging.info('Counting cherry-picks')
    db_session = db.Session()
    cherrypick_total = db_session.query(models.Cherrypick).join(
        models.Release).filter(
            models.Release.is_last_90_days(
                base_time=self.base_time)).count()
    db_session.close()
    return cherrypick_total
def _compute_value(self) -> float:
    """Counts the number of cherry-picks in the last 90 days.

    Returns:
        The number of cherry-picks.
    """
    logging.info('Counting cherry-picks')
    db_session = db.Session()
    scoped = models.CherrypickIssue.scope(db_session,
                                          base_time=self.base_time)
    issue_total = scoped.count()
    db_session.close()
    return issue_total
def get_or_create_site(name) -> Site:
    """Fetch the Site named `name`, creating and persisting it if absent."""
    db_session = db.Session()
    existing = db_session.query(Site).filter_by(name=name).first()
    if existing is not None:
        return existing
    # Not found: create, persist, and return a fresh row.
    site = Site()
    site.name = name
    db_session.add(site)
    db_session.commit()
    return site
def user_info(user_id):
    """Return a dict of profile fields for the Vkuser with vk_id == user_id.

    Raises NoResultFound / MultipleResultsFound via `.one()` when the row
    is missing or duplicated.
    """
    db_session = db.Session()
    record = db_session.query(db.Vkuser).filter(
        db.Vkuser.vk_id == user_id).one()
    return {
        'vk_id': record.vk_id,
        'birthdate': record.birthdate,
        'sex': record.sex,
        'first_name': record.first_name,
        'last_name': record.last_name,
        'city': record.city,
    }
def add_to_users(values: dict):
    """Insert a new Vkuser row built from the fields in `values`."""
    db_session = db.Session()
    db_session.add(db.Vkuser(
        vk_id=values['vk_id'],
        birthdate=values['birthdate'],
        sex=values['sex'],
        first_name=values['first_name'],
        last_name=values['last_name'],
        city=values['city'],
    ))
    db_session.commit()
def _compute_value(self) -> float:
    """Determines the code coverage percentage from Codecov.

    Returns:
        The percentage of lines tested in HEAD.
    """
    db_session = db.Session()
    # Most recent commit strictly before the evaluation time.
    head = db_session.query(models.Commit).filter(
        models.Commit.committed_at < self.base_time).order_by(
            models.Commit.committed_at.desc()).first()
    db_session.close()
    if head is None:
        raise ValueError('No commit available before %s' % self.base_time)
    return codecov.CodecovApi().get_absolute_coverage(head.hash) / 100
def _count_states(self) -> Dict[models.TravisState, int]:
    """Counts the number of builds for each relevant build state."""
    logging.info('Counting successful builds')
    relevant_states = [
        models.TravisState.PASSED,
        models.TravisState.FAILED,
        models.TravisState.ERRORED,
    ]
    db_session = db.Session()
    rows = db_session.query(
        models.Build.state,
        sqlalchemy.func.count().label('state_count')).filter(
            models.Build.is_last_90_days()).group_by(
                models.Build.state).filter(
                    models.Build.state.in_(relevant_states)).all()
    # (state, count) tuples convert directly into the result mapping.
    return dict(rows)
def _compute_value(self) -> float:
    """Computes the average duration of all completed builds.

    Excludes builds that are newly created, pending, cancelled, or errored
    since these either have no duration or are not representative of a real
    build.

    Raises:
        ValueError: if no builds exist.

    Returns:
        The average build duration in seconds.
    """
    session = db.Session()
    avg_seconds = session.query(sqlalchemy.func.avg(
        models.Build.duration)).filter(models.Build.is_last_90_days()).scalar()
    # scalar() yields None when no rows matched. The original truthiness
    # test (`if avg_seconds:`) would also treat a legitimate 0.0 average as
    # "no builds" and raise; check for None explicitly instead.
    if avg_seconds is None:
        raise ValueError('No Travis builds to process.')
    return float(avg_seconds)
def _compute_value(self) -> float:
    """Computes the fraction of flaky builds.

    A flaky build is defined here as any non-passing (failed or errored)
    build surrounded by two passing builds (Pass-Fail-Pass). Excludes
    builds that are newly created, pending, or cancelled.

    Raises:
        ValueError: if no builds or too few builds exist.

    Returns:
        The number of flaky builds divided by the number of builds
        considered. (The original docstring claimed "percentage of passing
        builds", which did not match the computation.)
    """
    logging.info('Counting flaky builds')
    session = db.Session()
    builds = models.Build.scope(session, base_time=self.base_time).filter(
        models.Build.state.in_([
            models.TravisState.PASSED,
            models.TravisState.FAILED,
            models.TravisState.ERRORED,
        ])).all()
    session.close()
    build_count = len(builds)
    if build_count == 0:
        raise ValueError('No Travis builds to process.')
    if build_count < 3:
        raise ValueError(
            'Not enough Travis builds to determine flakiness.')
    flakes = 0
    # Slide a window of three consecutive builds over the sequence.
    build_triples = zip(builds[:-2], builds[1:-1], builds[2:])
    for prev_build, curr_build, next_build in build_triples:
        if (prev_build.state == models.TravisState.PASSED
                and curr_build.state != models.TravisState.PASSED
                and next_build.state == models.TravisState.PASSED):
            flakes += 1
    return flakes / build_count
def compute_all(metrics: Sequence[base_metric.MetricImplementation]):
    """Compute metric results for each week going back one year."""
    one_year_ago = datetime.datetime.now() - ONE_YEAR
    metric_names = [metric.name for metric in metrics]
    logging.info('Backfilling results to %s',
                 one_year_ago.strftime('%Y-%m-%d'))
    session = db.Session()
    # BUG FIX: the original used `models.MetricResult.name in metric_names`.
    # Python's `in` on a SQLAlchemy Column does not build a SQL IN clause
    # (it compares element-wise and misbehaves as a filter); Column.in_()
    # is the correct construct.
    earliest_result = session.query(models.MetricResult).filter(
        models.MetricResult.name.in_(metric_names)).order_by(
            models.MetricResult.computed_at.asc()).first()
    session.close()
    earliest_result_time = (earliest_result.computed_at
                            if earliest_result else datetime.datetime.now())
    # Step back one week at a time until the one-year horizon is reached.
    next_result_time = earliest_result_time - ONE_WEEK
    while next_result_time > one_year_ago:
        _compute_all_at_time(metrics=metrics, base_time=next_result_time)
        next_result_time = next_result_time - ONE_WEEK
    logging.info('Finished backfilling metric results')
def __init__(self):
    """Create the GitHub GraphQL client, open a DB session, and reset
    pagination state."""
    # None presumably means "start paginating from the beginning" — confirm.
    self.cursor = None
    self.github = github.GitHubGraphQL()
    self.session = db.Session()
def run(entry_point):
    """Open old- and new-schema DB sessions, invoke `entry_point` with
    both, then commit the new session."""
    sys.path.append('..')  # make sibling `db` / `olddb` modules importable
    import db
    import olddb
    new_session = db.Session()
    old_session = olddb.Session()
    entry_point(new_session=new_session, old_session=old_session)
    new_session.commit()
def blacklist_ids(user_id):
    """Return the pair_id of every blacklist row owned by `user_id`."""
    db_session = db.Session()
    rows = db_session.query(db.Blacklist).filter(
        db.Blacklist.owner_id == user_id).all()
    pair_ids = []
    for row in rows:
        pair_ids.append(row.pair_id)
    return pair_ids
def check_id_in_database(id):
    """Report whether a Vkuser row with vk_id == `id` already exists."""
    db_session = db.Session()
    rows = db_session.query(db.Vkuser).filter(db.Vkuser.vk_id == id).all()
    # `id` may arrive as a string; compare its int form against stored ids.
    return any(int(id) == row.vk_id for row in rows)
def __init__(self):
    """Create the Travis API client, open a DB session, and reset
    pagination state."""
    # None presumably means "start paginating from the beginning" — confirm.
    self.cursor = None
    self.travis = travis.TravisApi()
    self.session = db.Session()
def show_whitelist(id):
    """Return [pair_id, full_name, photos, url] for each whitelist row
    owned by `id`."""
    db_session = db.Session()
    rows = db_session.query(db.Whitelist).filter(
        db.Whitelist.owner_id == id).all()
    listing = []
    for row in rows:
        listing.append([row.pair_id, row.full_name, row.photos, row.url])
    return listing
def get_latest_article(owner_id) -> Article:
    """Return the most recent Article for `owner_id`, or None if there is
    none."""
    db_session = db.Session()
    newest_first = (db_session.query(Article)
                    .filter_by(owner_id=owner_id)
                    .order_by(db.desc(Article.date)))
    return newest_first.first()
def get_all_articles():
    """Return a lazy Query over every Article (caller iterates/executes)."""
    db_session = db.Session()
    all_articles = db_session.query(Article)
    return all_articles