Example #1
def stars_mature(num_days):
    date_from = (datetime.now() + timedelta(days=num_days * -1)).strftime('%Y-%m-%d')
    service = bigquery.instance(app)
    query = """
        SELECT
            COUNT(1) AS stars, YEAR(created_at) AS y, DAYOFYEAR(created_at) AS doy
        FROM
            TABLE_DATE_RANGE(
                [githubarchive:day.events_], TIMESTAMP('{date_from}'), CURRENT_TIMESTAMP()
            )
        WHERE repo.id = {id} AND type IN ('WatchEvent', 'ForkEvent')
        GROUP BY y, doy
    """
    jobs = []

    repos = Repo.query.filter(Repo.mature.is_(True)).filter(Repo.status == 'new').limit(100)
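    # submit one batched BigQuery job per repository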
    for repo in repos:
        job = Job(service, query.format(id=repo.id, date_from=date_from), batch=True)
        job.execute()
        jobs.append((job, repo))

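    # collect the results and store the daily star counts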
    for job in jobs:
        for row in results_of(job[0]):
            db.session.add(RepoStars(repo_id=job[1].id, stars=row[0], year=row[1], day=row[2]))

        job[1].status = 'unknown'

        db.session.commit()
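
The results_of() helper used above is not shown on this page. Below is a minimal sketch of what it could look like, assuming the Job wrapper keeps a reference to the google-api-python-client BigQuery v2 service and exposes the project and job ids (the attribute names job.service, job.project_id and job.job_id are assumptions, and error handling and result paging are omitted):

import time

def results_of(job):
    # assumed attributes on the Job wrapper: job.service (BigQuery v2
    # resource from google-api-python-client), job.project_id, job.job_id
    jobs_api = job.service.jobs()

    # wait until the (batch) job has finished
    while True:
        state = jobs_api.get(projectId=job.project_id,
                             jobId=job.job_id).execute()
        if state['status']['state'] == 'DONE':
            break
        time.sleep(5)

    # fetch the result rows; the REST API returns each row as
    # {'f': [{'v': value}, ...]}
    response = jobs_api.getQueryResults(projectId=job.project_id,
                                        jobId=job.job_id).execute()
    for row in response.get('rows', []):
        yield [cell['v'] for cell in row['f']]
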
Example #2
def stars_mature(num_days):
    service = bigquery.instance(app)

    jobs = []

    repos = Repo.query\
        .filter(Repo.mature.is_(True))\
        .filter(Repo.status == 'new')\
        .order_by(Repo.checked_at.asc())\
        .limit(40)  # we are on the free plan
    for repo in repos:
        query = query_stars_by_repo(repo_id=repo.id,
                                    date_from=datetime.now() +
                                    timedelta(days=num_days * -1),
                                    date_to=datetime.now())

        job = Job(service, query, batch=True)
        job.execute()

        jobs.append((job, repo))

    for job in jobs:
        for row in results_of(job[0]):
            db.session.add(
                RepoStars(repo_id=job[1].id,
                          stars=row[0],
                          year=row[1],
                          day=row[2]))

        job[1].status = 'unknown'

        db.session.commit()
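
Examples #2 and #3 replace the inline template of Example #1 with a query_stars_by_repo() helper that is not shown on this page. A plausible reconstruction based on that template follows; the exact signature and the bounded upper date are assumptions:

def query_stars_by_repo(repo_id, date_from, date_to):
    # legacy BigQuery SQL over the daily githubarchive tables, counting
    # star/fork events per day for a single repository
    return """
        SELECT
            COUNT(1) AS stars, YEAR(created_at) AS y, DAYOFYEAR(created_at) AS doy
        FROM
            TABLE_DATE_RANGE(
                [githubarchive:day.events_],
                TIMESTAMP('{date_from}'), TIMESTAMP('{date_to}')
            )
        WHERE repo.id = {repo_id} AND type IN ('WatchEvent', 'ForkEvent')
        GROUP BY y, doy
    """.format(repo_id=repo_id,
               date_from=date_from.strftime('%Y-%m-%d'),
               date_to=date_to.strftime('%Y-%m-%d'))
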
Example #3
def stars_mature(num_days):
    service = bigquery.instance(app)

    jobs = []

    repos = Repo.query\
        .filter(Repo.mature.is_(True))\
        .filter(Repo.status == 'new')\
        .order_by(Repo.checked_at.asc())\
        .limit(100)  # donations will increase this number
    for repo in repos:
        query = query_stars_by_repo(
            repo_id=repo.id, date_from=datetime.now() + timedelta(days=num_days * -1),
            date_to=datetime.now()
        )

        job = Job(service, query, batch=True)
        job.execute()

        jobs.append((job, repo))

    for job in jobs:
        for row in results_of(job[0]):
            db.session.add(RepoStars(repo_id=job[1].id, stars=row[0], year=row[1], day=row[2]))

        status_old = job[1].status
        job[1].status = 'unknown'

        db.session.commit()

        app.logger.info(
            'Repository {0} got a new status {1} (was: {2})'
            .format(job[1].id, job[1].status, status_old)
        )
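
All three variants write their rows into RepoStars, which is defined elsewhere in the project. A minimal Flask-SQLAlchemy sketch matching the columns used above; the column types, the surrogate primary key and the index are assumptions:

class RepoStars(db.Model):
    # one row per repository per day: how many star/fork events were counted
    id = db.Column(db.Integer, primary_key=True)
    repo_id = db.Column(db.Integer, index=True)
    stars = db.Column(db.Integer)
    year = db.Column(db.Integer)
    day = db.Column(db.Integer)  # day of year, as returned by DAYOFYEAR()
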
Example #4
                                                   day=1),
                                date_to=datetime(year=now.year,
                                                 month=now.month - 1,
                                                 day=1))

    job = Job(service, query)
    job.execute()

    cnt = 0
    lst = {}
    for row in results_of(job):
        key = '{} {}'.format(row[1], row[3])
        lst[key] = lst.get(key, ()) + ((int(row[2]), int(row[0])), )

        db.session.merge(
            RepoStars(repo_id=result.id, stars=row[0], year=row[1],
                      day=row[2]))
        cnt += 1

    db.session.query(RepoMean).filter(RepoMean.repo_id == result.id).delete()
    db.session.commit()

    for key in lst.keys():
        avg = repo_mean(lst[key], 28, 4, last_known_mean(result.id))
        db.session.add(
            RepoMean(repo_id=result.id,
                     created_at=datetime.strptime(key, '%Y %m'),
                     value=avg))
        db.session.commit()

    db.session.query(Repo).filter(Repo.id == result.id)\
        .update({Repo.status: 'unknown', Repo.last_reset_at: now})
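
The last_known_mean() call in Example #4 feeds the previous monthly average into repo_mean(). It is not shown on this page either; a minimal sketch, assuming it simply returns the value of the newest RepoMean row for the repository (or None when no mean has been stored yet):

def last_known_mean(repo_id):
    # newest mean first; RepoMean.created_at marks the month the mean is for
    mean = RepoMean.query\
        .filter(RepoMean.repo_id == repo_id)\
        .order_by(RepoMean.created_at.desc())\
        .first()
    return mean.value if mean else None
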