def add_article_to_dropbox(event=None, handler=None):
    """Append a markdown-formatted article entry to a per-category notes file in Dropbox.

    Builds a markdown section from ``event.data`` (title, keywords, url,
    optional notes, summary), downloads any existing file at
    ``/notes/articles/<category>.md``, and re-uploads it with the new
    section appended.

    Args:
        event: object whose ``.data`` dict carries the article fields
               (``title``, ``keywords``, ``url``, ``notes``, ``summary``,
               ``category``).
        handler: unused; present for the common handler signature.
    """
    dbx = dropbox.Dropbox(get_credential("dropbox"))
    data = event.data

    markdown = []
    markdown.append('## ' + data['title'])
    markdown.append('**Keywords**: ' + ', '.join(data['keywords']))
    markdown.append('url: ' + data['url'])
    if len(data['notes']) > 0:
        markdown.append('### Notes')
        for note in data['notes']:
            markdown.append('- ' + note)
    markdown.append('### Summary')
    markdown.append(data['summary'])

    upload_path = '/notes/articles/{}.md'.format(data['category'])
    existing_file = None
    try:
        # files_download raises ApiError when the file doesn't exist yet;
        # in that case we simply start a fresh file.  (Was a bare
        # `except:` — narrowed so unrelated failures aren't hidden.)
        f = dbx.files_download(upload_path)[1]
        existing_file = f.content.decode('utf-8')
    except dropbox.exceptions.ApiError:
        pass

    to_upload = '\n\n'.join(markdown)
    if existing_file:
        to_upload = existing_file + '\n\n\n\n' + to_upload
    dbx.files_upload(to_upload.encode('utf-8'), upload_path,
                     mode=dropbox.files.WriteMode.overwrite)
def github_notifications(handler=None, event=None):
    """Poll unread GitHub notifications and publish events for review
    requests and issue assignments, then mark the handled ones as read.

    Args:
        handler: event bus exposing ``publish(topic, key, payload)``.
        event: unused; present for the common handler signature.
    """
    gtoken = get_credential('github_token')
    client = Github(gtoken)
    user = client.get_user()
    # all=False is broken
    # waiting for https://github.com/PyGithub/PyGithub/issues/1671
    notifications = user.get_notifications(all=False)

    handled = []
    logging.info('Checking for new github notifications...')
    for note in notifications:
        if not note.unread:
            continue
        if note.reason == 'review_requested':
            pr = note.get_pull_request()
            handler.publish('new_pr_review', f'pr_review: {pr.id}', pr)
            handled.append(note)
        if note.reason == 'assign':
            issue = note.get_issue()
            handler.publish('new_issue_assigned',
                            f'issue_assigned: {issue.id}', issue)
            handled.append(note)

    if len(handled) > 0:
        logging.info("Marking notifications as read")
        for note in handled:
            note.mark_as_read()
    else:
        logging.info("No new notifications found")
def todoist_reading_list(handler=None):
    """Archive every task in the Todoist 'reading list' project tree.

    Each task's content is either a markdown link ``[title](url)`` or a
    ``"title - url"`` string.  For each parsable task, the linked article
    is fetched and parsed, an ``archive_article`` event is published with
    its metadata, and the task is completed.  Unparsable tasks are renamed
    with a ``FAILED TO PARSE`` prefix and moved under the top-level
    reading list for manual triage.

    Args:
        handler: event bus exposing ``publish(topic, key, payload)``.
    """
    todoist = TodoistAPI(get_credential('todoist_token'))
    todoist.sync()

    reading_list = t_utils.get_project_by_name(todoist, 'reading list')
    categories = t_utils.get_child_projects(todoist, reading_list)

    for task in todoist.state['items']:
        for project in categories:
            if task['project_id'] == project['id']:
                content = task['content']
                logging.info(content)
                m = re.search(r'\[([^\[]+)\]\((.*)\)', content)
                # The todoist app stores links as either a markdown formatted
                # link or "title - url"; if the markdown form fails to match,
                # try to parse the "title - url" format.
                if m:
                    logging.info("markdown group")
                    title = m.group(1)
                    url = m.group(2)
                    logging.info(title)
                    logging.info(url)
                else:
                    logging.info("hyphen group")
                    content_components = content.split(" - ")
                    if len(content_components) > 1:
                        title = ''.join(content_components[:-1])
                        url = content_components[-1].strip()
                        logging.info(title)
                        logging.info(url)
                    else:
                        task.update(content="FAILED TO PARSE: " + content)
                        task.move(parent_id=reading_list['id'])
                        # BUG FIX: previously execution fell through here and
                        # archived the task using `title`/`url` left over from
                        # a prior iteration (or raised NameError on the first
                        # task).  Skip unparsable tasks instead.
                        continue

                comments = t_utils.get_comments_for_task(todoist, task)
                article = article_parser.parse_url(url)
                data = {
                    'url': url,
                    'title': title,
                    'summary': article.summary,
                    'keywords': article.keywords,
                    'text': article.text,
                    'published_date': article.publish_date,
                    'notes': comments,
                    'category': project['name']
                }
                handler.publish('archive_article', url, data)
                task.complete()
    todoist.commit()
def add_issue_to_todoist(event=None, handler=None):
    """Create a task in the Todoist 'issues' project for a GitHub issue.

    Args:
        event: object whose ``.data`` dict carries ``title``, ``number``
               and ``html_url`` for the issue.
        handler: unused; present for the common handler signature.
    """
    api = TodoistAPI(get_credential('todoist_token'))
    api.sync()

    issue_project = t_utils.get_project_by_name(api, 'issues')
    issue = event.data
    task_content = '{} [#{}]({})'.format(
        issue['title'], issue['number'], issue['html_url'])
    api.items.add(
        task_content,
        project_id=issue_project['id'],
    )
    # todo close todos for prs/issues that are no longer active
    api.commit()
def weather(handler=None, lat='42.358429', lon='-71.059769'):
    """Fetch current weather from OpenWeatherMap's onecall endpoint and publish it.

    The SHA-1 of the raw response body serves as the event key so an
    unchanged forecast is deduplicated downstream.

    Args:
        handler: event bus exposing ``publish(topic, key, payload)``.
        lat, lon: coordinates as strings; defaults keep the original
                  hard-coded location (downtown Boston) so existing
                  callers are unaffected.
    """
    token = get_credential('open_weather_map')
    # requests has no default timeout — without one a stalled connection
    # would hang this handler forever.
    r = requests.get('https://api.openweathermap.org/data/2.5/onecall?'
                     f'lat={lat}&'
                     f'lon={lon}&'
                     f'appid={token}',
                     timeout=30)
    m = hashlib.sha1()
    m.update(r.text.encode('utf-8'))
    handler.publish('owm_onecall', m.hexdigest(), r.json())
def add_mention_to_todoist(event=None, handler=None):
    """Create a high-priority Todoist task for a GitHub @-mention.

    Args:
        event: object whose ``.data`` dict carries ``title``, ``number``
               and ``url`` for the mentioning issue/PR.
        handler: unused; present for the common handler signature.
    """
    api = TodoistAPI(get_credential('todoist_token'))
    api.sync()

    mention_project = t_utils.get_project_by_name(api, 'GH Mentions')
    mention = event.data
    task_content = 'GH Mention - {} [#{}]({})'.format(
        mention['title'], mention['number'], mention['url'])
    api.items.add(
        task_content,
        # project_id=mention_project['id'],
        # auto_reminder=True,
        # due={"string": "next workday at 9am"},
        priority=4)
    # todo close todos for prs/issues that are no longer active
    api.commit()
def add_issue_to_todoist(event=None, handler=None):
    """Create a task in the Todoist 'issues' project for a GitHub issue.

    This variant reads the issue as an object with attributes
    (``.title``/``.number``/``.html_url``) rather than a dict.

    Args:
        event: object whose ``.data`` is the issue object.
        handler: unused; present for the common handler signature.
    """
    api = TodoistAPI(get_credential('todoist_token'))
    api.sync()

    # Case-insensitive scan for the 'issues' project.
    issue_project = next(
        (p for p in api.state['projects'] if p['name'].lower() == 'issues'),
        None)

    issue = event.data
    api.items.add(
        '{} [#{}]({})'.format(issue.title, issue.number, issue.html_url),
        project_id=issue_project['id'],
    )
    # todo close todos for prs/issues that are no longer active
    api.commit()
def add_pr_to_todoist(event=None, handler=None):
    """Create a due-dated, high-priority Todoist task for a PR review request.

    Args:
        event: object whose ``.data`` is the pull-request object
               (``.title``/``.base.repo.full_name``/``.number``/``.html_url``).
        handler: unused; present for the common handler signature.
    """
    api = TodoistAPI(get_credential('todoist_token'))
    api.sync()

    # Case-insensitive scan for the 'review requests' project.
    review_project = next(
        (p for p in api.state['projects']
         if p['name'].lower() == 'review requests'),
        None)

    pr = event.data
    api.items.add(
        '{} [{} #{}]({})'.format(
            pr.title, pr.base.repo.full_name, pr.number, pr.html_url),
        project_id=review_project['id'],
        auto_reminder=True,
        due={"string": "next workday at 9am"},
        priority=4)
    # todo close todos for prs/issues that are no longer active
    api.commit()
def add_pr_to_todoist(event=None, handler=None):
    """Create an inbox Todoist task for a PR review request (dict payload).

    Args:
        event: object whose ``.data`` dict carries ``title``, ``number``,
               ``html_url`` and ``base.repo.full_name``.
        handler: unused; present for the common handler signature.
    """
    api = TodoistAPI(get_credential('todoist_token'))
    api.sync()
    # review_project = t_utils.get_project_by_name(todoist, 'review requests')

    pr = event.data
    repo_name = pr['base']['repo']['full_name']
    # Pulp Ansible PRs get a distinguishing title prefix.
    prefix = 'Pulp Ansible PR' if repo_name == 'pulp/pulp_ansible' else 'PR'

    api.items.add(
        '{} - {} [{} #{}]({})'.format(
            prefix, pr['title'], repo_name, pr['number'], pr['html_url']),
        # project_id=review_project['id'],
        # auto_reminder=True,
        # due={"string": "next workday at 9am"},
        # priority=4
    )
    # todo close todos for prs/issues that are no longer active
    api.commit()
def github_notifications(handler=None, event=None):
    """Poll unread GitHub notifications and publish events for review
    requests, subscribed pulp/pulp_ansible PRs, issue assignments, and
    @-mentions.

    Handled notifications are collected in ``added_notifications``;
    marking them as read is currently disabled (commented out below).

    Args:
        handler: event bus exposing ``publish(topic, key, payload)``.
        event: unused; present for the common handler signature.
    """
    gtoken = get_credential('github_token')
    g = Github(gtoken)
    user = g.get_user()
    # all=False is broken
    # waiting for https://github.com/PyGithub/PyGithub/issues/1671
    notifications = user.get_notifications(all=False)
    added_notifications = []
    logging.info('Checking for new github notifications...')
    for n in notifications:
        if n.unread:
            # NOTE: bare `except:` clauses below were narrowed to
            # `except Exception` (so KeyboardInterrupt/SystemExit aren't
            # swallowed) and switched to logging.exception so the
            # traceback is recorded.
            if n.reason == 'review_requested':
                try:
                    pr = n.get_pull_request()
                    handler.publish('new_pr_review', f'pr_review-{pr.id}',
                                    pr._rawData)
                except Exception:
                    logging.exception(f'Could not retrieve pr at {n.subject.url}')
                added_notifications.append(n)
            if n.reason == 'subscribed':
                try:
                    # Only pulp/pulp_ansible PR subscriptions are interesting.
                    if n.subject.type == 'PullRequest' and \
                            n.repository.full_name == 'pulp/pulp_ansible':
                        pr = n.get_pull_request()
                        handler.publish('new_pr_review', f'pr_review-{pr.id}',
                                        pr._rawData)
                except Exception:
                    logging.exception(
                        f'Failed to retrieve mention {n.subject.type} at {n.subject.url}'
                    )
                added_notifications.append(n)
            elif n.reason == 'assign':
                try:
                    issue = n.get_issue()
                    handler.publish('new_issue_assigned',
                                    f'issue_assigned-{issue.id}',
                                    issue._rawData)
                except Exception:
                    logging.exception(
                        f'Could not retrieve issue at {n.subject.url}')
                added_notifications.append(n)
            elif n.reason == 'mention':
                try:
                    if n.subject.type == 'Issue':
                        issue = n.get_issue()
                        handler.publish(
                            'new_github_at_mention',
                            f'github_at_mention-{n.id}', {
                                'title': issue.title,
                                'url': issue.html_url,
                                'number': issue.number
                            })
                    if n.subject.type == 'PullRequest':
                        pr = n.get_pull_request()
                        handler.publish(
                            'new_github_at_mention',
                            f'github_at_mention-{n.id}', {
                                'title': pr.title,
                                'url': pr.html_url,
                                'number': pr.number
                            })
                except Exception:
                    logging.exception(
                        f'Failed to retrieve mention {n.subject.type} at {n.subject.url}'
                    )
                added_notifications.append(n)
    # if len(added_notifications) > 0:
    #     logging.info("Marking notifications as read")
    #     for n in added_notifications:
    #         n.mark_as_read()
    # BUG FIX: with the `if` above commented out, the original `else:`
    # attached to the `for` loop and logged on every run regardless of
    # what was found.  Restore the intended condition.
    if not added_notifications:
        logging.info("No new notifications found")