def remind(data, msg, r, mail):
    """Handle the "Remind" command from a private message.

    For every submission link in the mail body, scan the thread's comments:
    if the bot has already confirmed / errored / reminded there, suppress the
    reminder; otherwise send a reminder message for each non-bot comment.

    NOTE(review): `should_remind` is deliberately NOT reset per line, matching
    the original behavior — one suppressed thread suppresses later ones too.
    """
    logging.debug("Remind Command")
    running_username = str(data["running_username"]).lower()
    should_remind = True
    for submission_line in separate_mail(mail.body):
        thread_comments = r.get_submission(submission_line).comments
        for existing in thread_comments:
            # Only comments the bot may already have replied to are relevant.
            if not existing.replies:
                continue
            if comments.check_already_replied(data, msg["confirmation"], existing.replies, running_username):
                logging.info("Already Confirmed")
                should_remind = False
            elif comments.check_already_replied(data, msg["error_length"], existing.replies, running_username):
                # A length error may be fixable: if the comment was edited
                # since, give it another pass through the normal pipeline.
                if existing.edited:
                    comments.process_comments(data, msg, r, thread_comments)
                should_remind = False
            elif comments.check_already_replied(data, msg["error_bad_recipient"], existing.replies, running_username):
                logging.info("Already Notified - Bad Recipient")
                should_remind = False
            elif comments.check_already_replied(data, msg["error_submission_history"], existing.replies, running_username):
                logging.info("Already Notified - Submission History Error")
                should_remind = False
            elif comments.check_already_replied(data, msg["remind"], existing.replies, running_username):
                logging.info("Already Reminded")
                should_remind = False
        wait()
        if should_remind:
            for existing in thread_comments:
                if not existing.author:
                    # Deleted/removed comment — nobody to remind.
                    continue
                if str(existing.author.name).lower() != str(data["running_username"]).lower():
                    logging.info("User has been sent a reminder.")
                    messages.remind(existing)
                else:
                    logging.info("Silly person tried to remind me how to do my job.")
def read_comment_reply(data, msg, r, mail):
    """Handle a reply to one of the bot's own comments.

    Walks up from the inbox item to the bot's comment and its parent, then
    re-scans the whole submission unless the bot's comment was already a
    confirmation (matched by its first 15 characters, case-insensitively).
    """
    logging.debug("Reading the reply to my comment.")
    my_comment = r.get_info(thing_id=mail.parent_id)
    parent_comment = r.get_info(thing_id=my_comment.parent_id)
    thread_comments = r.get_submission(parent_comment.permalink).comments
    confirmation_prefix = str(msg["confirmation"]).lower()[0:15]
    if confirmation_prefix in str(my_comment.body).lower():
        logging.debug("This comment did not need rescanned.")
    else:
        logging.debug("Rescanning Comment")
        comments.process_comments(data, msg, r, thread_comments)
def add(data, msg, r, mail):
    """Handle the "Add" command from a private message.

    Each line of the mail body is treated as a submission link; every thread
    that resolves is run through the normal comment-processing pipeline.
    Lines that do not resolve are logged and skipped (best-effort by design).
    A completion message is sent back to the author afterwards.
    """
    logging.debug("Add Command")
    for line in separate_mail(mail.body):
        try:
            links = r.get_submission(line).comments
        except Exception:
            # Was a bare `except:` — narrowed to Exception so that
            # KeyboardInterrupt/SystemExit still propagate. The skip-and-log
            # behavior for unresolvable lines is intentionally preserved.
            logging.error("No Link Found in: %s" % line)
        else:
            # `proceed` flag removed: the try/except/else expresses the same
            # "only process when the lookup succeeded" control flow directly.
            comments.process_comments(data, msg, r, links)
        wait()
    r.send_message(mail.author.name, "Add Complete",
                   "The Add command has been completed for:\n\n%s" % mail.body)
errors = 0 counters.objects.labels(type="reminders").set(database.get_count_all_reminders()) counters.objects.labels(type="comments").set(database.get_count_all_comments()) counters.objects.labels(type="users").set(database.get_count_all_users()) counters.objects.labels(type="subreddits").set(database.get_count_all_subreddits()) counters.objects.labels(type="subreddits_banned").set(database.get_count_banned_subreddits()) try: actions += messages.process_messages(reddit, database) except Exception as err: utils.process_error(f"Error processing messages", err, traceback.format_exc()) errors += 1 try: actions += comments.process_comments(reddit, database) except Exception as err: utils.process_error(f"Error processing comments", err, traceback.format_exc()) errors += 1 try: actions += notifications.send_reminders(reddit, database) except Exception as err: utils.process_error(f"Error sending notifications", err, traceback.format_exc()) errors += 1 if utils.time_offset(last_comments, minutes=30): try: comments.update_comments(reddit, database) last_comments = utils.datetime_now() except Exception as err:
def run(conn, reddit, created_utc, comment_id):
    """Fetch one page of new command comments from the pushshift API and
    process them.

    Args:
        conn: open DB connection (used to persist the resume position in
            the ``last_comment`` table).
        reddit: reddit client handed through to ``comments.process_comments``.
        created_utc: epoch to resume fetching from (inclusive).
        comment_id: id of the last comment already processed in that epoch.

    Returns:
        ``(str(created_utc), str(comment_id))`` — the (possibly advanced)
        resume position for the next cycle. On any failure the original
        position is returned unchanged so the cycle can be retried.
    """
    try:
        # Build the URL to request
        comment_url = build_url({
            "q": static.COMMAND,
            "size": 250,
            "sort": "asc",
            "filter": ",".join(["id", "author", "created_utc", "body"]),
            "min_created_utc": created_utc,
        })
        print(
            f"Running query with starting utc {created_utc} and comment_id {comment_id}\n"
        )

        # Request and parse the response
        parsed_comment_json = requests.get(comment_url, headers=request_headers).json()
        comments_data = parsed_comment_json["data"]

        # Process comments if any were found
        if len(comments_data) > 0:
            # Try to find the index of the last processed comment; if present,
            # drop it and everything before it — those were created in the same
            # epoch and must have been processed in an earlier cycle.
            index_of_last_comment = find_index_of_comment_with_id(
                comments_data, comment_id)
            if index_of_last_comment is not None:
                comments_data = comments_data[index_of_last_comment + 1:]

            update_cur = conn.cursor()
            # If no comments remain after slicing by the comment id, this cycle
            # has caught up with all previously created comments; bump
            # created_utc by one so the next cycle does not re-request this epoch.
            if len(comments_data) == 0:
                update_cur.execute("UPDATE last_comment SET created_utc = %s",
                                   (int(created_utc) + 1, ))
                conn.commit()
                update_cur.close()
                return (str(created_utc), str(comment_id))

            # Update last comment time and comment id when any comments were received
            last_comment = comments_data[-1]
            created_utc = last_comment["created_utc"]
            comment_id = last_comment["id"]

            # Persist the resume position so a restarted bot picks up where it left off.
            update_cur.execute(
                "UPDATE last_comment SET created_utc = %s, comment_id = %s",
                (created_utc, comment_id))
            conn.commit()
            update_cur.close()

            comments.process_comments(conn, reddit, comments_data)
    except Exception:
        # Repaired: this log string was a single literal broken across lines
        # in the mangled source. `as e` was unused — logging.exception already
        # records the active traceback.
        logging.exception("Fetching comments failed, pushshift API probably is down")
    return (str(created_utc), str(comment_id))