Example #1
def create_comment():
    repo = request.json.get('repo')
    issue_number = request.json.get('issue_number')
    body = request.json.get('body')
    # Reject the request early if any required field is missing.
    if not repo or not issue_number or not body:
        abort(400)

    # Decode any base64-encoded image attachments, save them to the local
    # scratch directory, and append their public URLs to the comment body.
    if request.json.get('images'):
        body += '\n\n'
        for image in request.json.get('images'):
            image_bytes = base64.b64decode(image)
            filename = '{}.jpg'.format(hashlib.md5(image_bytes).hexdigest())
            with open('scratch/{}'.format(filename), 'wb') as output:
                output.write(image_bytes)

            body += '{}/{}\n'.format(
                current_app.config.get('STATIC_ASSET_URL'), filename)

    gh = Github(login_or_token=g.github_token, per_page=100)
    gh_repo = gh.get_repo(repo)

    # Create the comment through the GitHub API, then attach the extra
    # fields the Comment schema serializes.
    gh_issue = gh_repo.get_issue(issue_number)
    comment = gh_issue.create_comment(body)
    comment.repo = repo
    comment.issue_number = issue_number

    # .timestamp is a property on arrow < 1.0; newer releases expose
    # .int_timestamp instead.
    comment.unix_created_at = arrow.get(comment.created_at).timestamp
    comment.unix_updated_at = arrow.get(comment.updated_at).timestamp

    comment_schema = Comment()
    comment_result = comment_schema.dump(comment)

    return jsonify(created_comment=comment_result.data)
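
A minimal client-side sketch of how this handler might be called, assuming it is registered under a hypothetical /comments route and that the requests library is available; the URL, repository name, and image file are illustrative and not part of the handler above.

import base64
import requests

API_URL = 'https://example.com/comments'  # hypothetical; the handler does not declare its route here

with open('screenshot.jpg', 'rb') as f:
    encoded_image = base64.b64encode(f.read()).decode('ascii')

response = requests.post(API_URL, json={
    'repo': 'octocat/Hello-World',
    'issue_number': 42,
    'body': 'Looks good to me.',
    'images': [encoded_image],  # base64 strings, as the handler expects
})
print(response.json()['created_comment'])
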
Example #2
def get_issue_details():
    if not request.json.get('repo') or not request.json.get('issue_number'):
        abort(400)

    repo = request.json.get('repo')
    issue_number = request.json.get('issue_number')

    gh = Github(login_or_token=g.github_token, per_page=100)
    gh_repo = gh.get_repo(repo)

    issue = gh_repo.get_issue(issue_number)

    issue.repo = repo
    issue.unix_updated_at = arrow.get(issue.updated_at).timestamp
    issue.unix_created_at = arrow.get(issue.created_at).timestamp

    # Collect the issue's comments, annotated with the extra fields the
    # Comment schema serializes.
    all_comments = []

    for comment in issue.get_comments():
        comment.repo = repo
        comment.issue_number = issue.number
        comment.unix_updated_at = arrow.get(comment.updated_at).timestamp
        comment.unix_created_at = arrow.get(comment.created_at).timestamp
        all_comments.append(comment)

    issue_schema = Issue()
    issue_result = issue_schema.dump(issue)

    comments_schema = Comment(many=True)
    comments_result = comments_schema.dump(all_comments)

    return jsonify(issue=issue_result.data, comments=comments_result.data)
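
These handlers assume marshmallow-style Issue and Comment schemas whose dump() returns a result object with a .data attribute (the marshmallow 2.x API). A minimal sketch of what such schemas might look like; the exact field list is an assumption, not taken from the project.

from marshmallow import Schema, fields


class Issue(Schema):
    # Field names mirror the attributes set on the PyGithub objects above;
    # the selection is illustrative.
    number = fields.Int()
    title = fields.Str()
    body = fields.Str()
    repo = fields.Str()
    unix_created_at = fields.Int()
    unix_updated_at = fields.Int()


class Comment(Schema):
    id = fields.Int()
    body = fields.Str()
    repo = fields.Str()
    issue_number = fields.Int()
    unix_created_at = fields.Int()
    unix_updated_at = fields.Int()

With marshmallow 2.x, Comment(many=True).dump(all_comments).data yields a list of plain dictionaries ready for jsonify; on marshmallow 3.x, dump() returns that list directly and the .data attribute no longer exists.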
Example #3
def list_repos():
    repos_watched = request.json.get('repos')
    # Without a list of watched repos there is nothing to fetch.
    if not repos_watched:
        abort(400)

    gh = Github(login_or_token=g.github_token, per_page=100)

    # Flatten open issues and their comments across every watched repo,
    # annotating each object with the extra fields the schemas serialize.
    all_issues = []
    all_comments = []
    for repo_id in repos_watched:
        repo = gh.get_repo(repo_id)
        for issue in repo.get_issues():
            issue.repo = repo.full_name
            issue.unix_updated_at = arrow.get(issue.updated_at).timestamp
            issue.unix_created_at = arrow.get(issue.created_at).timestamp
            all_issues.append(issue)

            for comment in issue.get_comments():
                comment.repo = repo.full_name
                comment.issue_number = issue.number
                comment.unix_updated_at = arrow.get(comment.updated_at).timestamp
                comment.unix_created_at = arrow.get(comment.created_at).timestamp
                all_comments.append(comment)

    all_issues.sort(key=lambda i: i.unix_updated_at, reverse=True)
    all_comments.sort(key=lambda c: c.unix_updated_at, reverse=True)

    issues = Issue(many=True)
    issues_result = issues.dump(all_issues)

    comments = Comment(many=True)
    comments_results = comments.dump(all_comments)

    return jsonify(issues=issues_result.data,
                   comments=comments_results.data)
Example #4
def process_emails():
    app.logger.info('Running email thread.')

    for account in Account.query.all():
        # First digest for this account: start the clock now.
        if not account.last_email:
            account.last_email = arrow.now().datetime

        # Band-aid for being outside of request context
        token = AccessToken.query.filter_by(account_id=account.id).first()
        if not token:
            continue
        github_token = token.github_token

        # Only consider issues updated within the last hour.
        timestamp_window = arrow.get(arrow.now().timestamp - 3600).datetime
        gh = Github(login_or_token=github_token, per_page=100)

        all_issues = []
        all_comments = []
        for repo_id in account.watchlist:
            repo = gh.get_repo(repo_id)
            for issue in repo.get_issues(since=timestamp_window):
                issue.repo = repo.full_name
                issue.unix_updated_at = arrow.get(issue.updated_at).timestamp
                issue.unix_created_at = arrow.get(issue.created_at).timestamp
                all_issues.append(issue)

                for comment in issue.get_comments():
                    comment.repo = repo.full_name
                    comment.issue_number = issue.number
                    comment.unix_updated_at = arrow.get(comment.updated_at).timestamp
                    comment.unix_created_at = arrow.get(comment.created_at).timestamp
                    all_comments.append(comment)

        # Aggregate across the whole watchlist, newest activity first.
        all_issues.sort(key=lambda i: i.unix_updated_at, reverse=True)
        all_comments.sort(key=lambda c: c.unix_updated_at, reverse=True)

        # Only digest comments updated since the last email went out.
        all_comments = [a for a in all_comments if a.unix_updated_at > arrow.get(account.last_email).timestamp]

        issues = Issue(many=True)
        issues_result = issues.dump(all_issues)

        comments = Comment(many=True)
        comments_results = comments.dump(all_comments)

        num_comments = len(comments_results.data)
        num_issues = len(issues_result.data)

        if num_issues + num_comments >= 1:
            issues_format = '<li>Issue #{}: {} ({})</li>\n'
            issues_block = ''
            for i in all_issues:
                issues_block += issues_format.format(i.number, i.title, arrow.get(i.updated_at).humanize())

            send_mandrill_email(account.email, 'email-digest', {
                'num_comments': num_comments,
                'num_issues': num_issues,
                'issues_block': issues_block
            })

            app.logger.info('Sent an email digest to %s.', account.email)

        account.last_email = arrow.now().datetime
        db.session.commit()
Example #5
def process_notifications():
    app.logger.info('Running notification thread.')

    for account in Account.query.all():
        if not account.last_email:
            account.last_email = arrow.now().datetime

        if not account.last_push:
            account.last_push = arrow.now().datetime

        # Band-aid for being outside of request context
        token = AccessToken.query.filter_by(account_id=account.id).first()
        if not token:
            continue
        github_token = token.github_token

        # Only consider issues updated within the last 30 seconds.
        timestamp_window = arrow.get(arrow.now().timestamp - 30).datetime
        gh = Github(login_or_token=github_token, per_page=100)

        all_issues = []
        all_comments = []
        for repo_id in account.watchlist:
            repo = gh.get_repo(repo_id)
            for issue in repo.get_issues(since=timestamp_window):
                issue.repo = repo.full_name
                issue.unix_updated_at = arrow.get(issue.updated_at).timestamp
                issue.unix_created_at = arrow.get(issue.created_at).timestamp
                all_issues.append(issue)

                for comment in issue.get_comments():
                    comment.repo = repo.full_name
                    comment.issue_number = issue.number
                    comment.unix_updated_at = arrow.get(
                        comment.updated_at).timestamp
                    comment.unix_created_at = arrow.get(
                        comment.created_at).timestamp
                    all_comments.append(comment)

        all_issues.sort(key=lambda i: i.unix_updated_at, reverse=True)
        all_comments.sort(key=lambda c: c.unix_updated_at, reverse=True)

        # Only push comments updated since the last notification went out.
        all_comments = [
            a for a in all_comments
            if a.unix_updated_at > arrow.get(account.last_push).timestamp
        ]

        issues = Issue(many=True)
        issues_result = issues.dump(all_issues)

        comments = Comment(many=True)
        comments_results = comments.dump(all_comments)

        message = 'There were {} updated issues and {} updated comments.'.format(
            len(issues_result.data), len(comments_results.data))
        if len(comments_results.data) + len(issues_result.data) >= 1:
            send_notification(account.id, message)
            app.logger.info(message)

        account.last_push = arrow.now().datetime
        # Persist the new push timestamp so the next run does not re-notify.
        db.session.commit()
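
process_emails and process_notifications run outside of any request context (hence the access-token band-aid), so they are presumably driven by some background scheduler. A minimal sketch of one way to run them periodically with the standard library's threading module inside the Flask application context; the interval values and the "from app import app" import path are assumptions, and the two job functions are assumed to be importable alongside it.

import threading

from app import app  # hypothetical import path for the Flask app object


def run_periodically(func, interval_seconds):
    """Call func inside the Flask app context every interval_seconds."""
    def _tick():
        with app.app_context():
            try:
                func()
            except Exception:
                app.logger.exception('Background job %s failed.', func.__name__)
        # Re-arm the timer; a daemon timer lets the process exit cleanly.
        timer = threading.Timer(interval_seconds, _tick)
        timer.daemon = True
        timer.start()

    _tick()


# Assumed intervals: notifications look 30 seconds back, the digest one hour back.
run_periodically(process_notifications, 30)
run_periodically(process_emails, 3600)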