# Shared imports for the views and workers below. The Flask app object, the
# SQLAlchemy models (Account, AccessToken, db), the marshmallow schemas
# (Issue, Comment), and the mail/push helpers are app-local; their module
# paths are not shown here, so the imports for them are sketched as comments.
# arrow < 1.0 (.timestamp as a property) and marshmallow 2.x (.dump(...).data)
# APIs are assumed throughout.
import base64
import hashlib

import arrow
from flask import abort, current_app, g, jsonify, request
from github import Github
from github.GithubObject import NotSet

# Assumed app-local imports (paths are illustrative):
#   from app import app, db
#   from app.models import Account, AccessToken
#   from app.schemas import Issue, Comment
#   from app.notifications import send_mandrill_email, send_notification


def get_issue_details():
    # Both fields are required; reject the request otherwise.
    if not request.json.get('repo') or not request.json.get('issue_number'):
        abort(400)
    repo = request.json.get('repo')
    issue_number = request.json.get('issue_number')

    gh = Github(login_or_token=g.github_token, per_page=100)
    gh_repo = gh.get_repo(repo)
    issue = gh_repo.get_issue(issue_number)

    # Stamp fields the schemas and clients expect but the GitHub API
    # doesn't return directly.
    issue.repo = repo
    issue.unix_updated_at = arrow.get(issue.updated_at).timestamp
    issue.unix_created_at = arrow.get(issue.created_at).timestamp

    all_comments = []
    for comment in issue.get_comments():
        comment.repo = repo
        comment.issue_number = issue.number
        comment.unix_updated_at = arrow.get(comment.updated_at).timestamp
        comment.unix_created_at = arrow.get(comment.created_at).timestamp
        all_comments.append(comment)

    issue_schema = Issue()
    issue_result = issue_schema.dump(issue)
    comments_schema = Comment(many=True)
    comments_result = comments_schema.dump(all_comments)
    return jsonify(issue=issue_result.data, comments=comments_result.data)
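# A hypothetical client call for get_issue_details(), assuming it is mounted
# as a POST JSON endpoint; the route and port are illustrative, not from the
# original:
#
#   import requests
#   resp = requests.post('http://localhost:5000/api/issue',
#                        json={'repo': 'octocat/Hello-World',
#                              'issue_number': 1347})
#   resp.json()  # -> {'issue': {...}, 'comments': [{...}, ...]}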
def list_repos():
    repos_watched = request.json.get('repos')
    gh = Github(login_or_token=g.github_token, per_page=100)

    all_issues = []
    all_comments = []
    for repo_id in repos_watched:
        repo = gh.get_repo(repo_id)
        for issue in repo.get_issues():
            issue.repo = repo.full_name
            issue.unix_updated_at = arrow.get(issue.updated_at).timestamp
            issue.unix_created_at = arrow.get(issue.created_at).timestamp
            all_issues.append(issue)
            for comment in issue.get_comments():
                comment.repo = repo.full_name
                comment.issue_number = issue.number
                comment.unix_updated_at = arrow.get(comment.updated_at).timestamp
                comment.unix_created_at = arrow.get(comment.created_at).timestamp
                all_comments.append(comment)

    # Newest activity first.
    all_issues.sort(key=lambda i: i.unix_updated_at, reverse=True)
    all_comments.sort(key=lambda c: c.unix_updated_at, reverse=True)

    issues = Issue(many=True)
    issues_result = issues.dump(all_issues)
    comments = Comment(many=True)
    comments_results = comments.dump(all_comments)
    return jsonify(issues=issues_result.data, comments=comments_results.data)
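# The expected payload for list_repos(), read off the code above -- 'repos'
# holds full-name identifiers as accepted by PyGithub's get_repo():
#
#   {"repos": ["octocat/Hello-World", "octocat/Spoon-Knife"]}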
def process_emails():
    app.logger.info('Running email thread.')
    for account in Account.query.all():
        # Band-aid for being outside of request context
        token = AccessToken.query.filter_by(account_id=account.id).first()
        github_token = token.github_token
        # Only look at issues updated within the last hour.
        timestamp_window = arrow.get(arrow.now().timestamp - 3600).datetime

        gh = Github(login_or_token=github_token, per_page=100)
        all_issues = []
        all_comments = []
        for repo_id in account.watchlist:
            repo = gh.get_repo(repo_id)
            for issue in repo.get_issues(since=timestamp_window):
                issue.repo = repo.full_name
                issue.unix_updated_at = arrow.get(issue.updated_at).timestamp
                issue.unix_created_at = arrow.get(issue.created_at).timestamp
                all_issues.append(issue)
                for comment in issue.get_comments():
                    comment.repo = repo.full_name
                    comment.issue_number = issue.number
                    comment.unix_updated_at = arrow.get(comment.updated_at).timestamp
                    comment.unix_created_at = arrow.get(comment.created_at).timestamp
                    all_comments.append(comment)

        all_issues.sort(key=lambda i: i.unix_updated_at, reverse=True)
        all_comments.sort(key=lambda c: c.unix_updated_at, reverse=True)
        # Keep only comments updated since the last digest went out. The
        # original compared with '<', which would re-send already-digested
        # comments and drop new ones; '>' matches the notification path below.
        all_comments = [a for a in all_comments
                        if a.unix_updated_at > arrow.get(account.last_email).timestamp]

        issues = Issue(many=True)
        issues_result = issues.dump(all_issues)
        comments = Comment(many=True)
        comments_results = comments.dump(all_comments)

        num_comments = len(comments_results.data)
        num_issues = len(issues_result.data)
        if num_issues + num_comments >= 1:
            issues_format = '<li>Issue #{}: {} ({})</li>\n'
            issues_block = ''
            for i in all_issues:
                issues_block += issues_format.format(
                    i.number, i.title, arrow.get(i.updated_at).humanize())
            send_mandrill_email(account.email, 'email-digest', {
                'num_comments': num_comments,
                'num_issues': num_issues,
                'issues_block': issues_block,
            })
            app.logger.info('sent an email.')
            account.last_email = arrow.now().datetime
            db.session.flush()
            db.session.commit()
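# A minimal scheduling sketch (an assumption -- the original runner is not
# shown). The one-hour window inside process_emails() suggests an hourly
# cadence; a daemon thread with an app context covers the "outside of
# request context" band-aid noted above:

import threading
import time

def run_email_worker(interval_seconds=3600):
    def loop():
        while True:
            with app.app_context():  # models are queried outside any request
                process_emails()
            time.sleep(interval_seconds)
    threading.Thread(target=loop, daemon=True).start()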
def process_notifications():
    app.logger.info('Running notification thread.')
    for account in Account.query.all():
        # First run for this account: initialize the bookmarks.
        if not account.last_email:
            account.last_email = arrow.now().datetime
        if not account.last_push:
            account.last_push = arrow.now().datetime

        # Band-aid for being outside of request context
        token = AccessToken.query.filter_by(account_id=account.id).first()
        github_token = token.github_token
        # Only look at issues updated within the last 30 seconds.
        timestamp_window = arrow.get(arrow.now().timestamp - 30).datetime

        gh = Github(login_or_token=github_token, per_page=100)
        all_issues = []
        all_comments = []
        for repo_id in account.watchlist:
            repo = gh.get_repo(repo_id)
            for issue in repo.get_issues(since=timestamp_window):
                issue.repo = repo.full_name
                issue.unix_updated_at = arrow.get(issue.updated_at).timestamp
                issue.unix_created_at = arrow.get(issue.created_at).timestamp
                all_issues.append(issue)
                for comment in issue.get_comments():
                    comment.repo = repo.full_name
                    comment.issue_number = issue.number
                    comment.unix_updated_at = arrow.get(comment.updated_at).timestamp
                    comment.unix_created_at = arrow.get(comment.created_at).timestamp
                    all_comments.append(comment)

        all_issues.sort(key=lambda i: i.unix_updated_at, reverse=True)
        all_comments.sort(key=lambda c: c.unix_updated_at, reverse=True)
        # Keep only comments updated since the last push notification.
        all_comments = [a for a in all_comments
                        if a.unix_updated_at > arrow.get(account.last_push).timestamp]

        issues = Issue(many=True)
        issues_result = issues.dump(all_issues)
        comments = Comment(many=True)
        comments_results = comments.dump(all_comments)

        message = 'There were {} updated issues and {} updated comments.'.format(
            len(issues_result.data), len(comments_results.data))
        if len(comments_results.data) + len(issues_result.data) >= 1:
            send_notification(account.id, message)
            app.logger.info(message)
            account.last_push = arrow.now().datetime
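# The repo/timestamp stamping repeats in every fetch loop in this file; a
# small helper (a sketch, not part of the original) would factor it out:

def stamp(obj, repo_full_name, issue_number=None):
    """Attach the repo name and unix timestamps the schemas and sorts rely on."""
    obj.repo = repo_full_name
    if issue_number is not None:
        obj.issue_number = issue_number
    obj.unix_updated_at = arrow.get(obj.updated_at).timestamp
    obj.unix_created_at = arrow.get(obj.created_at).timestamp
    return obj

# Usage inside the loops would then be stamp(issue, repo.full_name) and
# stamp(comment, repo.full_name, issue.number).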
def create_issue():
    repo = request.json.get('repo')
    title = request.json.get('title')
    body = request.json.get('body')
    assigned_to = request.json.get('assigned_to')
    milestone_number = request.json.get('milestone_number')
    label_names = request.json.get('label_names')
    # PyGithub uses the NotSet sentinel for optional create_issue() arguments.
    assigned_to_user, milestone, label_list = NotSet, NotSet, NotSet

    if not repo or not title:
        abort(400)

    # Decode any attached images to disk and append their public URLs to the
    # issue body.
    if request.json.get('images'):
        body += '\n\n'
        for image in request.json.get('images'):
            image_bytes = base64.b64decode(image)
            filename = '{}.jpg'.format(hashlib.md5(image_bytes).hexdigest())
            with open('scratch/{}'.format(filename), 'wb') as output:
                output.write(image_bytes)
            body += '{}/{}\n'.format(
                current_app.config.get('STATIC_ASSET_URL'), filename)

    gh = Github(login_or_token=g.github_token, per_page=100)
    gh_repo = gh.get_repo(repo)
    if assigned_to:
        assigned_to_user = assigned_to['login']
    if milestone_number:
        milestone = gh_repo.get_milestone(milestone_number)
    if label_names:
        label_list = [gh_repo.get_label(name) for name in label_names]

    r = gh_repo.create_issue(title=title, body=body, assignee=assigned_to_user,
                             milestone=milestone, labels=label_list)
    r.repo = repo
    r.unix_created_at = arrow.get(r.created_at).timestamp
    r.unix_updated_at = arrow.get(r.updated_at).timestamp

    issue_schema = Issue()
    issue_result = issue_schema.dump(r)
    return jsonify(created_issue=issue_result.data)
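# A hypothetical payload for create_issue(); only 'repo' and 'title' are
# required, and each 'images' entry is a base64-encoded JPEG:
#
#   {
#     "repo": "octocat/Hello-World",
#     "title": "Crash on launch",
#     "body": "Steps to reproduce...",
#     "assigned_to": {"login": "octocat"},
#     "milestone_number": 1,
#     "label_names": ["bug"],
#     "images": ["<base64-encoded JPEG bytes>"]
#   }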