def process_invoice_data(invoice_data):
     log.debug('process_invoice_data', invoice_data=invoice_data)
     with session_scope(echo=True) as db_session:
         invoice = db_session.query(Invoices).filter(
             Invoices.id == invoice_data['id']).one()
         invoice.status = invoice_data['status']
         invoice.data = invoice_data
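
The session_scope() helper used throughout these examples is not shown; a minimal sketch, assuming the standard SQLAlchemy transactional-scope recipe and the get_url() helper from Example #6:

from contextlib import contextmanager

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker


@contextmanager
def session_scope(echo=False):
    # Build an engine from get_url() (Example #6) and yield a session that
    # commits on success and rolls back on any exception.
    engine = create_engine(get_url(), echo=echo)
    session = sessionmaker(bind=engine)()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()
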
Example #2
 def payment_notification():
     r = request.get_json()
     log.debug('invoice_notification', request=r, session=session)
     if 'data' in r:
         r = r['data']
     app.payment_processor.process_invoice_data(r)
     return {}
Example #3
 def get(self, login: str) -> dict:
     variables = {'userLogin': login}
     json_object = {'query': user_graphql_query, 'variables': variables}
     log.debug('getting user', json_object=json_object)
     r = self.graphql_post(json_object=json_object)
     user = r.json()['data']['user']
     return user
 def generate_invoice(self, bounty_id: str, recipient_user_id: str):
     with session_scope() as db_session:
         bounty: Bounties = db_session.query(Bounties).filter(Bounties.id == bounty_id).one()
         recipient: Users = db_session.query(Users).filter(Users.id == recipient_user_id).one()
         if recipient.btcpay_client is None:
             flash(f'{recipient.best_name} does not have BTCPay configured here. Elsewhere they may have other ways '
                   f'of receiving payments (Patreon, static address, etc).')
             return redirect(url_for('bounties-payable.index_view'))
         try:
             recipient_btcpay = RecipientBTCPay(client=recipient.btcpay_client)
             invoice_data = recipient_btcpay.get_pull_request_invoice(
                 amount=bounty.amount,
                 bounty_id=bounty_id,
                 pull_request_number=bounty.pull_request.number
             )
             invoice_model = Invoices()
             invoice_model.bounty_id = bounty.id
             invoice_model.id = invoice_data['id']
             invoice_model.status = invoice_data['status']
             invoice_model.url = invoice_data['url']
             invoice_model.recipient_user_id = recipient_user_id
             invoice_model.payer_user_id = bounty.payer_user_id
             db_session.add(invoice_model)
             return redirect(invoice_model.url)
         except RequestException as e:
             log.debug('RequestException', exception=e, request=e.request, response=e.response)
             try:
                 r: Response = e.response
                 flash(f'{r.status_code} - {r.text}', category='error')
             except AttributeError:
                 flash('Request error')
             return redirect(url_for('users.index_view'))
Example #5
 def graphql_post(self, json_object: dict):
     log.debug('graphql post', api_url=self.api_url, json=json_object)
     r = requests.post(self.api_url + 'graphql',
                       auth=self.auth,
                       headers=self.dev_preview_headers,
                       json=json_object)
     r.raise_for_status()
     return r
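
graphql_post relies on self.api_url, self.auth and self.dev_preview_headers being set on the client; a minimal sketch of that setup (the class name, credential handling and preview header value are assumptions, not from the source):

class GitHubAPI:
    def __init__(self, username: str, token: str):
        # graphql_post appends 'graphql' to this base URL.
        self.api_url = 'https://api.github.com/'
        # requests accepts a (user, token) tuple for HTTP basic auth.
        self.auth = (username, token)
        # Example preview media type; the real project may request a different one.
        self.dev_preview_headers = {
            'Accept': 'application/vnd.github.merge-info-preview+json'
        }
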
Example #6
def get_url():
    pg_url = URL(drivername='postgresql+psycopg2',
                 username=os.environ['POSTGRES_USER'],
                 password=os.environ['POSTGRES_PASSWORD'],
                 host=os.environ['PGHOST'],
                 port=os.environ['PGPORT'],
                 database=os.environ['POSTGRES_DB'])
    log.debug('get PG url', pg_url=pg_url)
    return pg_url
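
get_url uses the positional sqlalchemy.engine.url.URL constructor, which SQLAlchemy 1.4+ replaces with URL.create(); an equivalent sketch for newer SQLAlchemy versions, assuming the same environment variables:

import os

from sqlalchemy.engine import URL


def get_url():
    return URL.create(drivername='postgresql+psycopg2',
                      username=os.environ['POSTGRES_USER'],
                      password=os.environ['POSTGRES_PASSWORD'],
                      host=os.environ['PGHOST'],
                      port=int(os.environ['PGPORT']),  # coerce the port to int
                      database=os.environ['POSTGRES_DB'])
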
 def get_query(self):
     log.debug('get_query', request=request.args)
     if request.args.get('sort'):
         return super().get_query()
     query = (
         self.session.query(self.model)
             .order_by(self.model.is_high_priority.asc().nullslast())
     )
     if self._get_list_extra_args().sort is None:
         query = query.order_by(self.model.last_commit_pushed_date.desc().nullslast())
     return query
Example #8
    def on_model_change(self, form, model: Bounties, is_created: bool):
        model.id = uuid4().hex
        model.published_at = datetime.utcnow()
        model.payer_user_id = current_user.id
        model.recipient_user_id = model.pull_request.author_id

        with session_scope() as session:
            total_bounty_amount = (session.query(
                coalesce(func.sum(Bounties.amount), 0)).filter(
                    Bounties.pull_request_id == model.pull_request.id).one()
                                   )[0]
            log.debug('total_satoshis',
                      total_bounty_amount=total_bounty_amount)
            model.pull_request.total_bounty_amount = total_bounty_amount + model.amount
Example #9
def invoices_formatter(view, context, model, name):
    invoices: List[Invoices] = getattr(model, name)
    paid_invoices = [i for i in invoices if i.status in ('paid', 'complete')]
    unpaid_invoices = [i for i in invoices if i.status not in ('paid', 'complete')]
    output = ''
    for paid_invoice in paid_invoices:
        output += '<div style="white-space: nowrap; overflow: hidden;">' \
                  '<span class="label label-success">{invoice_description}</span></div>'.format(
                   invoice_description=f'{paid_invoice.id} {paid_invoice.status}')
    if unpaid_invoices:
        output += f'<div style="white-space: nowrap; overflow: hidden;">{len(unpaid_invoices)} unpaid invoices</div>'

    log.debug('invoices_formatter', invoices=invoices, output=output)
    return Markup(output)
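
The (view, context, model, name) signature matches Flask-Admin's column formatter callback; a minimal sketch of how such a formatter is typically registered (the view class name and column key are hypothetical):

from flask_admin.contrib.sqla import ModelView


class BountiesAdminView(ModelView):
    # Render the related invoices with the custom formatter above.
    column_formatters = {
        'invoices': invoices_formatter,
    }
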
 def update(self):
     with session_scope() as session:
         try:
             record = (
                 session
                     .query(PullRequests.updated_at)
                     .order_by(PullRequests.updated_at.desc())
                     .limit(1)
                     .one()
             )
             from_date = record.updated_at
         except NoResultFound:
             from_date = datetime(2009, 1, 1)
     log.debug('Updating PRs starting from', from_date=from_date)
     self.update_all(newer_than=from_date)
Example #11
 def update_pull_requests():
     polling_data = PollingData('github')
     try:
         if polling_data.is_polling():
             raise Exception('GitHub is already being polled')
         polling_data.start()
         with TemporaryDirectory() as temporary_directory_path:
             pull_requests_data = PullRequestsData(
                 'bitcoin', 'bitcoin', temporary_directory_path)
             pull_requests_data.update()
     except Exception as e:
         log.error('polling exception', exc_info=e)
         tb = traceback.format_exc()
         email.notify('Polling exception\n\n' + tb)
     else:
         log.debug('Successful poll')
     finally:
         polling_data.stop()
Example #12
    def github_logged_in(github_blueprint, token):
        if not token:
            flash("Failed to log in.", category="error")
            return redirect(url_for("github.login"))

        user_resp = github_blueprint.session.get("/user")
        log.debug('user response', resp=user_resp.json())
        emails_resp = github_blueprint.session.get("/user/emails")
        log.debug('user emails response', resp=emails_resp.json())
        if not emails_resp.ok:
            log.error('github_logged_in error',
                      resp=emails_resp.json(),
                      token=token)
            msg = "Failed to fetch user info."
            flash(msg, category="error")
            return False

        info = user_resp.json()
        user_id = info["node_id"]
        email = [e for e in emails_resp.json() if e['primary']][0]['email']

        with session_scope() as db_session:
            try:
                user = db_session.query(Users).filter(
                    Users.id == user_id).one()
            except NoResultFound:
                user = Users(id=user_id)
                db_session.add(user)
            user.is_active = True
            user.email = email
            try:
                db_session.query(OAuth).filter_by(
                    provider=github_blueprint.name,
                    provider_user_id=user_id).one()
            except NoResultFound:
                oauth = OAuth(provider=github_blueprint.name,
                              provider_user_id=user_id,
                              user_id=user_id,
                              token=token)
                db_session.add(oauth)
            login_user(user)
            flash("Successfully signed in.")
        return False
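
The (blueprint, token) signature and the final return False (which tells Flask-Dance to skip its default token storage, since the token is written to the OAuth table here) match a Flask-Dance oauth_authorized handler; a minimal sketch of how such a handler is typically wired up (the scope is an assumption):

from flask_dance.consumer import oauth_authorized
from flask_dance.contrib.github import make_github_blueprint

github_blueprint = make_github_blueprint(scope='user:email')


@oauth_authorized.connect_via(github_blueprint)
def github_logged_in(github_blueprint, token):
    ...  # body as in Example #12 above
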
Example #13
    def get_pull_request_invoice(self, amount: int, bounty_id: str,
                                 pull_request_number: int):
        notification_url = url_for('payment_notification', _external=True)
        if 'localhost' in notification_url:
            notification_url = notification_url.replace('localhost', 'webapp')

        payload = {
            'price': amount / COIN,
            'currency': 'BTC',
            'orderId': bounty_id,
            'itemDesc': f'Payment for pull request {pull_request_number}',
            'redirectURL': url_for('bounties-payable.index_view',
                                   _external=True),
            'notificationURL': notification_url,
            'extendedNotifications': True
        }
        log.debug('btcpay client request', payload=payload)
        invoice_data = self.client.create_invoice(payload=payload)
        log.debug('btcpay server response', invoice_data=invoice_data)
        return invoice_data
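
The bounty amount is kept in satoshis and divided by COIN to produce the BTC price BTCPay expects; a minimal sketch of that constant and conversion (the definition of COIN is assumed here, it is not shown in the source):

COIN = 100_000_000  # satoshis per bitcoin; assumed to match Bitcoin Core's COIN constant


def satoshi_to_btc(amount: int) -> float:
    # e.g. 150_000 satoshis -> 0.0015 BTC
    return amount / COIN
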
    def get_all(self,
                newer_than: datetime,
                state: PullRequestState = None,
                limit: int = None):
        variables = {}
        received = 0
        ends_at = newer_than
        while limit is None or received < limit:
            if limit is None:
                variables['prFirst'] = self.MAX_PRS
            else:
                variables['prFirst'] = min(limit - received, self.MAX_PRS)

            if state is not None:
                variables['prState'] = state.value

            formatted_ends_at = ends_at.replace(microsecond=0).astimezone(pytz.utc).isoformat()
            variables['searchQuery'] = f'type:pr updated:>={formatted_ends_at} repo:bitcoin/bitcoin sort:updated-asc'

            log.debug('Variables for graphql pull requests query', variables=variables)
            json_object = {
                'query': pull_requests_graphql_query,
                'variables': variables
            }

            data = self.graphql_post(json_object=json_object).json()

            search_data = data['data']['search']

            pull_requests_graphql_data = search_data['edges']
            results_count = len(search_data['edges'])

            log.debug(
                'response from github graphql',
                results_count=results_count
            )
            if not results_count:
                break

            starts_at = pull_requests_graphql_data[0]['node']['updatedAt']
            previous_ends_at = ends_at
            ends_at = dateutil.parser.parse(pull_requests_graphql_data[-1]['node']['updatedAt'])
            if previous_ends_at == ends_at:
                break
            log.debug(
                'Pull requests fetched',
                starts_at=starts_at,
                ends_at=ends_at
            )

            pull_requests_graphql_data = [r['node'] for r in pull_requests_graphql_data if r['node']]

            for pull_request_graphql in pull_requests_graphql_data:
                validated_pull_request_data = pull_request_schema.load(pull_request_graphql)
                self.parse_into_queue(validated_pull_request_data)
                if limit is not None and received == limit:
                    break
                received += 1
            self.flush_queue_to_database()
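
get_all pages through GitHub's GraphQL search API by repeatedly advancing the updated:>= bound in the search string until no new results come back; a simplified sketch of what the pull_requests_graphql_query it posts could look like (the field selection is an assumption and the optional $prState variable is omitted; only $searchQuery and $prFirst come from the code above):

pull_requests_graphql_query = '''
query ($searchQuery: String!, $prFirst: Int!) {
  search(query: $searchQuery, type: ISSUE, first: $prFirst) {
    edges {
      node {
        ... on PullRequest {
          number
          title
          updatedAt
        }
      }
    }
  }
}
'''
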
Example #15
def send_tweet(pull_request_number=None):
    with session_scope() as session:
        now = datetime.datetime.utcnow()
        yesterday = now - datetime.timedelta(days=1)
        twitter = Twython(os.environ['BTC_TWITTER_APP_KEY'],
                          os.environ['BTC_TWITTER_APP_SECRET'],
                          os.environ['BTC_TWITTER_OAUTH_TOKEN'],
                          os.environ['BTC_TWITTER_OAUTH_TOKEN_SECRET'])
        if pull_request_number:
            next_pull_request = (session.query(PullRequests).filter(
                PullRequests.number == pull_request_number).first())
        else:
            next_pull_request = (session.query(PullRequests).order_by(
                PullRequests.merged_at.asc()).filter(
                    PullRequests.merged_at > yesterday).filter(
                        PullRequests.tweet_id.is_(None)).filter(
                            PullRequests.merged_at.isnot(None)).first())
        if next_pull_request is None:
            log.debug('No pull requests found.')
            return
        commits_url = 'https://api.github.com/repos/bitcoin/bitcoin/commits'
        params = {'author': next_pull_request.author.login}
        response = requests.get(commits_url, params=params)
        log.debug('github response', response=response)
        response_json = response.json()
        author_name = next_pull_request.author.name or next_pull_request.author.login
        if len(response_json) > 1 and next_pull_request.number != 14802:
            status = 'Merged PR from {0}: {1} {2}' \
                .format(author_name,
                        next_pull_request.title,
                        next_pull_request.html_url)
        else:
            status = '''
            {0}'s first merged PR: {1}
            Congratulations!  πŸŽ‰πŸΎπŸŽ†
            '''.format(author_name, next_pull_request.html_url)
        log.debug('tweet status', status=status)
        tweet = twitter.update_status(status=status)
        log.debug('tweet', tweet=tweet)
        new_tweet = Tweets()
        new_tweet.id = tweet['id']
        new_tweet.pull_request_id = next_pull_request.number
        session.add(new_tweet)
        next_pull_request.tweet_id = tweet['id']
Example #16


if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description='Tweet merged pull request')
    parser.add_argument('-p', dest='pr_number', type=int, default=None)
    args = parser.parse_args()
    log.debug(f'sending tweet for {args.pr_number}')
    send_tweet(pull_request_number=args.pr_number)
Example #17
                                   or 'pull_request' in pr.keys()):
                pull_request_numbers.add(pr['number'])

        pr_data = PullRequestsData(repository_path=self.repo.path,
                                   repository_name=self.repo.name)
        for number in pull_request_numbers:
            pr_data.update(number=number)


if __name__ == '__main__':
    import os

    repository_path = 'bitcoin'
    repository_name = 'bitcoin'
    log.debug('Running pull request events update script',
              path=os.path.realpath(__file__),
              repository_name=repository_name,
              repository_path=repository_path)
    create_or_update_database()
    pr_events = PullRequestEvents(repository_path=repository_path,
                                  repository_name=repository_name)
    pr_data = PullRequestsData(repository_path=repository_path,
                               repository_name=repository_name)
    # polling_data = PollingData(repository_path=repository_path,
    #                            repository_name=repository_name)
    while True:
        pr_events.get()
        # polling_data.update(last_event=True)
        sleep_time = (datetime.utcnow() - pr_events.rate_limit_reset
                      ).seconds / pr_events.rate_limit_remaining
        time.sleep(math.ceil(sleep_time) + 5)
 def update_all(self,
                newer_than: datetime,
                state: PullRequestState = None,
                limit: int = None):
     log.debug('update_all', state=state, limit=limit, newer_than=newer_than)
     self.get_all(state=state, limit=limit, newer_than=newer_than)
    def insert_pull_requests(self):
        with session_scope() as db_session:
            missing_authors = db_session.execute(
                """
SELECT DISTINCT epr.data ->> 'author_login'
FROM etl_data epr
         LEFT OUTER JOIN users authors ON epr.data ->> 'author_login' = authors.login
WHERE authors.id IS NULL;
                """
            ).fetchall()

        if missing_authors:
            log.debug('missing_authors', missing_authors=missing_authors, count=len(missing_authors))

        for author in missing_authors:
            login = author[0]
            if login is None:
                continue
            user_data = self.users_data.get(login)
            self.users_data.upsert(user_data)

        with session_scope() as db_session:
            db_session.execute("""
WITH etl_data AS (
    SELECT DISTINCT epr.data ->> 'id'                                      AS id,
                    (epr.data ->> 'repository_id')::int                    AS repository_id,
                    author.id                                              AS author_id,
                    (epr.data ->> 'number')::int                           AS "number",
                    epr.data ->> 'state'                                   AS "state",
                    epr.data ->> 'title'                                   AS title,
                    (epr.data ->> 'createdAt')::timestamp with time zone                  AS created_at,
                    (epr.data ->> 'updatedAt')::timestamp with time zone                  AS updated_at,
                    (epr.data ->> 'is_high_priority')::timestamp with time zone           AS is_high_priority,
                    (epr.data ->> 'added_to_high_priority')::timestamp with time zone     AS added_to_high_priority,
                    (epr.data ->> 'removed_from_high_priority')::timestamp with time zone AS removed_from_high_priority,
                    (epr.data ->> 'additions')::int                        AS additions,
                    (epr.data ->> 'deletions')::int                        AS deletions,
                    epr.data ->> 'mergeable'                               AS mergeable,
                    epr.data ->> 'last_commit_state'                       AS last_commit_state,
                    epr.data ->> 'last_commit_state_description'           AS last_commit_state_description,
                    epr.data ->> 'last_commit_short_hash'                  AS last_commit_short_hash,
                    (epr.data ->> 'last_commit_pushed_date')::timestamp with time zone    AS last_commit_pushed_date,
                    epr.data ->> 'bodyText'                                AS body,
                    (epr.data ->> 'mergedAt')::timestamp with time zone                   AS merged_at,
                    (epr.data ->> 'closedAt')::timestamp with time zone                   AS closed_at,
                    (epr.data ->> 'commit_count')::int                     AS commit_count
    FROM etl_data epr
             LEFT OUTER JOIN users author
                             ON epr.data ->> 'author_login' = author.login
)
INSERT
INTO pull_requests (id,
                    repository_id,
                    author_id,
                    "number",
                    "state",
                    title,
                    created_at,
                    updated_at,
                    is_high_priority,
                    added_to_high_priority,
                    removed_from_high_priority,
                    additions,
                    deletions,
                    mergeable,
                    last_commit_state,
                    last_commit_state_description,
                    last_commit_short_hash,
                    last_commit_pushed_date,
                    body,
                    merged_at,
                    closed_at,
                    commit_count)
SELECT *
FROM etl_data
ON CONFLICT ON CONSTRAINT pull_requests_unique_constraint DO UPDATE SET repository_id                 = excluded.repository_id,
                                                                        author_id                     = excluded.author_id,
                                                                        "number"                      = excluded.number,
                                                                        "state"                       = excluded.state,
                                                                        title                         = excluded.title,
                                                                        created_at                    = excluded.created_at,
                                                                        updated_at                    = excluded.updated_at,
                                                                        is_high_priority              = excluded.is_high_priority,
                                                                        added_to_high_priority        = excluded.added_to_high_priority,
                                                                        removed_from_high_priority    = excluded.removed_from_high_priority,
                                                                        additions                     = excluded.additions,
                                                                        deletions                     = excluded.deletions,
                                                                        mergeable                     = excluded.mergeable,
                                                                        last_commit_state             = excluded.last_commit_state,
                                                                        last_commit_state_description = excluded.last_commit_state_description,
                                                                        last_commit_short_hash        = excluded.last_commit_short_hash,
                                                                        last_commit_pushed_date       = excluded.last_commit_pushed_date,
                                                                        body                          = excluded.body,
                                                                        merged_at                     = excluded.merged_at,
                                                                        closed_at                     = excluded.closed_at,
                                                                        commit_count                  = excluded.commit_count
;""")
    def insert_comments_and_reviews(self):
        with session_scope() as db_session:
            missing_authors = db_session.execute(
                """
SELECT DISTINCT etl_data.data ->> 'author_login'
FROM etl_data
         LEFT OUTER JOIN users ON etl_data.data ->> 'author_login' = users.login
WHERE users.id IS NULL;
                """
            ).fetchall()

        if missing_authors:
            log.debug('missing_authors', missing_authors=missing_authors, count=len(missing_authors))

        for author in missing_authors:
            login = author[0]
            if login is None:
                continue
            user_data = self.users_data.get(login)
            self.users_data.upsert(user_data)

        with session_scope() as db_session:
            db_session.execute(
                """
WITH etl_data AS (
    SELECT DISTINCT etl_data.data ->> 'id'                                                  AS id,
                    etl_data.data ->> 'bodyText'                                            AS body,
                    (etl_data.data ->> 'publishedAt')::timestamp with time zone                            AS published_at,
                    etl_data.data ->> 'url'                                                 AS url,
                    etl_data.data ->> 'pull_request_id'                                     AS pull_request_id,
                    users.id                                                                AS author_id,
                    split_part(etl_data.data ->> 'review_decision', '.', 2)::reviewdecision AS auto_detected_review_decision
    FROM etl_data
             LEFT OUTER JOIN users
                             ON etl_data.data ->> 'author_login' = users.login
)
INSERT
INTO comments (id,
               body,
               published_at,
               url,
               pull_request_id,
               author_id,
               auto_detected_review_decision)
SELECT *
FROM etl_data
ON CONFLICT (id) DO UPDATE SET id = excluded.id,
                               body                          = excluded.body,
                               published_at                  = excluded.published_at,
                               url                           = excluded.url,
                               pull_request_id               = excluded.pull_request_id,
                               author_id                     = excluded.author_id,
                               auto_detected_review_decision = excluded.auto_detected_review_decision
;
                """
            )
        with session_scope() as db_session:
            db_session.execute(
                """
WITH etl_data AS (
    SELECT DISTINCT etl_data.data ->> 'pull_request_id' AS pull_request_id FROM etl_data
)
UPDATE pull_requests
SET review_decisions_count = s.review_decisions_count
FROM (SELECT count(comments.id) AS review_decisions_count,
             etl_data.pull_request_id
      FROM etl_data
               LEFT JOIN comments ON etl_data.pull_request_id = comments.pull_request_id AND
                                     comments.auto_detected_review_decision IS NOT NULL AND
                                     comments.auto_detected_review_decision != 'NONE'::reviewdecision
      GROUP BY etl_data.pull_request_id) s
WHERE s.pull_request_id = pull_requests.id;
                """
            )
Example #21
                    session
                        .query(PullRequests.number)
                        .filter(
                        and_(PullRequests.is_high_priority.isnot(None))
                    )
                        .all()
                )
                for r in record:
                    pull_requests_data.update(number=int(r.number))
        elif args.old:
            with session_scope() as session:
                try:
                    record = (
                        session
                        .query(PullRequests.updated_at)
                        .order_by(PullRequests.updated_at.desc())
                        .limit(1)
                        .one()
                    )
                    from_date = record.updated_at.date() - timedelta(days=1)
                except NoResultFound:
                    from_date = date(2000, 1, 1)
                pull_requests_data.update_all(newer_than=from_date,
                                              limit=args.limit)
        else:
            # All
            log.debug('All')
            pull_requests_data.update_all(limit=args.limit)

        polling_data.stop()
Example #22
                command.stamp(alembic_config, 'head')
            else:
                command.upgrade(alembic_config, 'head')

    except OperationalError as e:
        print(e)
        time.sleep(10)
        create_or_update_database(echo=echo)


def drop_database(echo=True):
    with session_scope(echo=echo) as session:
        Base.metadata.drop_all(session.connection())
        session.execute('DROP TABLE github.public.alembic_version;')


if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description='Change the schema')

    parser.add_argument('-d', dest='drop', default=False, action='store_true')
    args = parser.parse_args()
    confirm = input(f'Dropping tables on {get_url()}, are you sure? y/n')
    if confirm != 'y':
        log.debug('User did not confirm; exiting')
        sys.exit(1)
    else:
        if args.drop:
            drop_database()
        create_or_update_database()