def post_or_update_ama(url, no_comment=False, no_r_tabled=False, dry_run=False, trust=5):
    # Table an AMA submission: gather its question/answer pairs, persist
    # tracking state in TrackedTable, and (via post_table_submission's
    # callback) post or update the excerpt comment and /r/tabled post.
    #
    # url may be a praw submission object (anything with an .id), a full
    # http URL, or a bare submission id.  trust gates the sanity checks:
    # higher trust skips title (>=8), age (>=5) and size (>=10) validation.
    # Returns (0, "") on success, or (-1, reason) when a check fails.
    r = get_reddit()
    if hasattr(url, 'id'):
        submission = url
    elif url.startswith('http://'):
        submission = r.get_submission(url=url)
    else:
        submission = r.get_submission(submission_id=url)
    # Heuristic title check: lowercase and strip spaces so "Ask Me
    # Anything", "AmA", "I am X, AMA" etc. all match.
    dumbtitle = submission.title.lower().replace(' ', '')
    if (
            trust < 8
            and ('ask' not in dumbtitle or 'anything' not in dumbtitle)
            and 'ama' not in dumbtitle
            ):
        return -1, "Title does not sound like an AMA."
    if trust < 5 and old_enough_for_table(submission.created_utc):
        # NOTE(review): a truthy result here *rejects* the AMA as too young,
        # so despite its name the helper presumably returns True while the
        # thread is still inside the 12-hour window — confirm its contract.
        return -1, "As per /r/tabled rules, AMAs may only be tabled 12 hours after they started."
    qa = list(grab_questions(submission))
    table = list(table_questions(qa))
    if trust < 10 and len(table) < 15:
        # this ama is crap/not an ama/author deleted
        # Touch the edited timestamp on any existing tracking row so the
        # scheduler does not immediately retry this thread.
        try:
            tt = TrackedTable.objects.get(parent=submission.id)
            tt.edited = datetime.utcnow()
            # NOTE(review): this save runs even when dry_run=True, unlike
            # the save further down — confirm whether that is intended.
            tt.save()
        except TrackedTable.DoesNotExist:
            pass
        return -1, "Less than 15 question/answers."
    # Python 2 tuple-parameter lambda: pick the (question, answer) pair
    # whose answer is newest, then keep just the answer.
    last_answer = max(qa, key=lambda (q,a): a.created_utc)[1]
    last_answer_time = last_answer.created_utc
    # Update the existing tracking row, or create one on first sight.
    try:
        tt = TrackedTable.objects.get(parent=submission.id)
        tt.edited = datetime.utcnow()
        tt.last_answer = datetime.utcfromtimestamp(last_answer_time)
    except TrackedTable.DoesNotExist:
        tt = TrackedTable(parent=submission.id)
        tt.last_answer = datetime.utcfromtimestamp(last_answer_time)
        tt.started = tt.edited = datetime.utcnow()
    if not dry_run:
        tt.save()

    def cb():
        # Deferred: post_table_submission invokes this after the /r/tabled
        # post exists, so the excerpt comment can link to the full table.
        # The comment is posted once the thread is old enough, or always
        # when a comment already exists (tt.comment) and just needs editing.
        if not no_comment and (
                old_enough_for_comment(submission.created_utc, last_answer_time)
                or tt.comment):
            post_table_comment(submission, table, tt, dry_run)
    # NOTE(review): when no_r_tabled is True the callback never fires, so
    # no comment is posted either — confirm that coupling is intended.
    if not no_r_tabled:
        post_table_submission(submission, table, tt, dry_run, cb=cb)
    return 0, ""
def top_iama(subreddits=('iama', 'internetama')):
    """Return untracked, AMA-looking submissions from the hot pages of *subreddits*.

    Pulls the top 25 hot submissions from each subreddit, drops the ones
    already tracked, and keeps only those that pass the AMA filter.
    """
    reddit = get_reddit()
    # Fetch every listing up front, then flatten them into one stream.
    listings = [reddit.get_subreddit(name).get_hot(limit=25)
                for name in subreddits]
    return iama_filter(remove_tracked(chain.from_iterable(listings)))
def post_table_comment(submission, table, tt, dry_run=False, cb=None):
    # Render the excerpt comment for *submission* from *table* (capped at
    # 3000 chars by linesep) and either edit the comment already recorded
    # on the tracking row *tt*, or post a new one through the rate limiter.
    # *cb*, if given, runs after a brand-new comment is successfully posted.
    r = get_reddit()
    msg = StringIO()
    linesep(
        table, file=msg, limit=3000,
        intro="""\
""",
        outro="""\
""" + ('**[View the full table]({0})** on'.format(tt.get_submission_url('tabled'))
       if tt.submission else 'More tables at')
      + """ [/r/tabled](/r/tabled)! | """ + last_updated(tt) + """

*This comment was generated by a robot! Send all complaints to [epsy](/message/compose/?to=epsy).*
"""
        )
    if dry_run:
        # NOTE(review): precedence makes this print
        # ('Editing comment ' + tt.comment) when tt.comment is set, else
        # 'New comment' — correct, but worth parenthesizing if touched.
        print('Editing comment ' + tt.comment if tt.comment else 'New comment')
        print('--- BEGIN COMMENT ---')
        print(msg.getvalue().encode('utf-8'))
        print('--- END COMMENT ---')
    else:
        if tt.comment:
            # '_' is a sentinel meaning "thread too old to comment on";
            # in that case there is nothing to edit.
            if tt.comment != '_':
                edit('t1_' + tt.comment, msg.getvalue())
        else:
            def make_comment():
                try:
                    response = submission.add_comment(
                        msg.getvalue()
                        )
                except errors.APIException as e:
                    if e.error_type == 'TOO_OLD':
                        # Record permanently that this thread can no longer
                        # accept comments, so we never retry.
                        tt.comment = '_'
                        tt.save()
                    else:
                        raise
                else:
                    tt.comment = response.id
                    tt.save()
                    print("Posted excerpt for {0}: {1}".format(
                        tt.get_parent_url(),
                        tt.get_comment_url(submission.subreddit.display_name)
                        ))
                    # Fire the caller's success hook only once the comment
                    # id has been persisted.
                    if cb:
                        cb()
            ratelimit(make_comment)
def read_messages():
    # Generator: scan unread inbox messages (marking them read) for reddit
    # links and classify each as an AMA thread ("ama") or a comment
    # permalink ("verification"), yielding (kind, link, message) tuples.
    # Each sender is limited to 4 accepted links per run.
    r = get_reddit()
    login()
    # Per-author remaining link budget; first sight seeds it at 4.
    people = {}
    for (text, address, title, autolink, host), message in links_from_body(
            mark_as_read(
                new_messages()
            )):
        author = message.author.name
        linksleft = people.setdefault(author, 4)
        if linksleft:
            # Prefer an explicit markdown link target over a bare URL
            # found in the text.
            link = address or autolink
            m = reddit_re.match(link)
            if not m:
                # Not a reddit link — skipped without spending the budget.
                continue
            subreddit, submissionid, preview, comment = m.groups()
            # A comment component in the URL means the sender is pointing
            # at a specific comment (verification), not a whole thread.
            if not comment:
                yield "ama", link, message
            else:
                yield "verification", link, message
            # Spend one unit of budget.  This must stay inside the
            # `if linksleft:` branch: decrementing past zero would yield a
            # truthy -1 and break the 4-link cap.
            people[author] = linksleft - 1
def new_messages():
    """Log in and return an iterator over every unread message in the inbox."""
    reddit = get_reddit()
    login()
    unread = reddit.get_unread(limit=None)
    return unread