def process_messages(reddit, database):
	"""Process all fetched inbox messages, oldest first.

	For each real message (not a comment/other inbox object) that has a
	non-system, non-"reddit" author, runs process_message(); any processing
	error is reported via utils.process_error(), whose return value decides
	whether the message is still marked read. The database is committed after
	every processing attempt.

	Returns the number of messages fetched (not the number processed).
	"""
	messages = reddit.get_messages()
	if messages:  # idiomatic truthiness instead of len(...)
		log.debug(f"Processing {len(messages)} messages")
		i = 0
		for message in messages[::-1]:  # reversed: process oldest first
			i += 1
			mark_read = True
			if reddit.is_message(message):
				if message.author is None:
					log.info(f"Message {message.id} is a system notification")
				elif message.author.name == "reddit":
					log.info(f"Message {message.id} is from reddit, skipping")
				else:
					try:
						process_message(message, reddit, database, f"{i}/{len(messages)}")
					except Exception as err:
						# process_error() returns truthy when the error is fatal
						# enough that the message should stay unread for a retry.
						mark_read = not utils.process_error(
							f"Error processing message: {message.id} : u/{message.author.name}",
							err, traceback.format_exc())
					finally:
						database.commit()
			else:
				log.info(f"Object not message, skipping: {message.id}")
			if mark_read:
				try:
					reddit.mark_read(message)
				except Exception as err:
					# BUG FIX: system notifications (author is None) reach this
					# point too — the old f-string dereferenced message.author.name
					# unconditionally and raised AttributeError inside the handler.
					author_name = message.author.name if message.author is not None else "[None]"
					utils.process_error(
						f"Error marking message read: {message.id} : {author_name}",
						err, traceback.format_exc())
	return len(messages)
def check_stock(t_id, user_id, request, request_type):
	"""Build a command for the given stock symbol and dispatch it.

	Rejects the request (via process_error) when the 'stock' query
	parameter is missing or empty; otherwise forwards the command.
	"""
	symbol = request.args.get('stock', default='', type=str)
	cmd = Command(
		TransactionID=int(t_id),
		C_type=request_type,
		UserId=user_id,
		Timestamp=time.time(),
		StockSymbol=symbol,
	)
	# Empty string means the parameter was absent or blank.
	if not symbol:
		return process_error(audit, cmd, "Parameter: 'stock' cannot be empty.")
	return send_cmd(cmd)
def process_comments(reddit, database):
	"""Fetch keyword-matching comments newer than the last-seen timestamp,
	export pushshift client health metrics, then process each comment
	oldest-first.

	Returns the number of comments fetched, or — when a comment fails and
	process_error() says not to mark it processed — the count handled so
	far, so the failed comment is retried on the next run.
	"""
	comments = reddit.get_keyword_comments(
		static.TRIGGER_COMBINED,
		database_get_seen(database).replace(tzinfo=None))

	# Lag gauges for both pushshift clients plus the effective (auto) lag.
	counters.pushshift_delay.labels(client="prod").set(
		reddit.pushshift_prod_client.lag_minutes())
	counters.pushshift_delay.labels(client="beta").set(
		reddit.pushshift_beta_client.lag_minutes())
	counters.pushshift_delay.labels(client="auto").set(
		reddit.get_effective_pushshift_lag())

	# One-hot gauge: which client served the most recent request.
	if reddit.recent_pushshift_client == PushshiftType.PROD:
		counters.pushshift_client.labels(client="prod").set(1)
		counters.pushshift_client.labels(client="beta").set(0)
	elif reddit.recent_pushshift_client == PushshiftType.BETA:
		counters.pushshift_client.labels(client="prod").set(0)
		counters.pushshift_client.labels(client="beta").set(1)
	else:
		counters.pushshift_client.labels(client="prod").set(0)
		counters.pushshift_client.labels(client="beta").set(0)

	counters.pushshift_failed.labels(client="prod").set(
		1 if reddit.pushshift_prod_client.failed() else 0)
	counters.pushshift_failed.labels(client="beta").set(
		1 if reddit.pushshift_beta_client.failed() else 0)
	# CONSISTENCY FIX: every other metric here uses the keyword form
	# labels(client=...); these two used the positional form. With a single
	# label the value is the same, but the keyword form is explicit and
	# matches the rest of the function.
	counters.pushshift_seconds.labels(client="prod").observe(
		reddit.pushshift_prod_client.request_seconds)
	counters.pushshift_seconds.labels(client="beta").observe(
		reddit.pushshift_beta_client.request_seconds)

	if comments:  # idiomatic truthiness instead of len(...)
		log.debug(f"Processing {len(comments)} comments")
		i = 0
		for comment in comments[::-1]:  # reversed: process oldest first
			i += 1
			mark_read = True
			try:
				process_comment(comment, reddit, database, f"{i}/{len(comments)}")
			except Exception as err:
				# process_error() returns truthy when the comment should be
				# left unprocessed for a retry.
				mark_read = not utils.process_error(
					f"Error processing comment: {comment['id']} : {comment['author']}",
					err, traceback.format_exc())
			if mark_read:
				reddit.mark_keyword_comment_processed(comment['id'])
				database_set_seen(
					database,
					utils.datetime_from_timestamp(comment['created_utc']))
			else:
				# Stop early: don't advance the seen-timestamp past a
				# comment that failed, so it is retried next run.
				return i
	return len(comments)
def check_amount(t_id, user_id, request, request_type):
	"""Validate the 'amount' query parameter and dispatch the command.

	BUG FIX: request.args.get('amount', type=int) yields an int, or None
	when the parameter is missing or not parseable as an int — it can never
	be the empty string, so the old `amount == ""` guard was dead code and a
	missing amount slipped through as None. Check `amount is None` instead.
	"""
	amount = request.args.get('amount', type=int)
	# Command without Amount, used only for error auditing.
	cmd = Command(TransactionID=int(t_id), C_type=request_type,
				  UserId=user_id, Timestamp=time.time())
	if amount is None:
		return process_error(audit, cmd, "Parameter: 'amount' cannot be empty.")
	cmd = Command(TransactionID=int(t_id), C_type=request_type,
				  UserId=user_id, Timestamp=time.time(), Amount=amount)
	return send_cmd(cmd)
def dumplog(t_id, user_id):
	"""Handle a DUMPLOG request: audit the command asynchronously, enqueue
	it, wait for the audit to finish, and return the queue response
	(JSON-serialized when it is a thrift Response).

	Fixes:
	- The old guard `filename == "" and filename != "admin"` is logically
	  just `filename == ""` (an empty string can never equal "admin"), so
	  the redundant clause is dropped — behavior unchanged.
	- The local future was named `log`, shadowing the module-level logger;
	  renamed to `audit_future`.
	- `type(resp) is Response` replaced with the idiomatic isinstance().
	"""
	filename = request.form["filename"]
	cmd = Command(TransactionID=int(t_id), C_type=Cmd.DUMPLOG.value,
				  UserId=user_id, Timestamp=time.time(), FileName=filename)
	if filename == "":
		return process_error(audit, cmd, "Parameter: 'filename' cannot be empty.")
	# Audit in the background while the command is queued.
	audit_future = executor.submit(audit.UserCommand, *(cmd, ))
	resp = queue.PutItem(hash(user_id), cmd)
	audit_future.result()  # ensure the audit write completed before responding
	return json.dumps(thrift_to_json(resp)) if isinstance(resp, Response) else resp
# Top of a single bot run: snapshot database object counts into gauges, then
# execute each processing stage, tallying actions and counting (but not
# propagating) per-stage errors.
startTime = time.perf_counter()
log.debug("Starting run")
actions = 0
errors = 0
# Gauge snapshot of current object counts in the database.
counters.objects.labels(type="reminders").set(database.get_count_all_reminders())
counters.objects.labels(type="comments").set(database.get_count_all_comments())
counters.objects.labels(type="users").set(database.get_count_all_users())
counters.objects.labels(type="subreddits").set(database.get_count_all_subreddits())
counters.objects.labels(type="subreddits_banned").set(database.get_count_banned_subreddits())
# Each stage is isolated: a failure is reported and counted, and the run
# continues with the next stage.
try:
    actions += messages.process_messages(reddit, database)
except Exception as err:
    utils.process_error(f"Error processing messages", err, traceback.format_exc())
    errors += 1
try:
    actions += comments.process_comments(reddit, database)
except Exception as err:
    utils.process_error(f"Error processing comments", err, traceback.format_exc())
    errors += 1
try:
    actions += notifications.send_reminders(reddit, database)
except Exception as err:
    utils.process_error(f"Error sending notifications", err, traceback.format_exc())
    errors += 1
# NOTE(review): chunk is truncated here — the body of this conditional
# (presumably a comment-processing stage throttled to every 30 minutes)
# is outside the visible source.
if utils.time_offset(last_comments, minutes=30):
# Startup: CLI flags, praw session, discord-forwarded logging, metrics server,
# database init, then the main loop (single pass when --once is given).
# NOTE(review): `parser` is created before this chunk — presumably an
# argparse.ArgumentParser; confirm against the preceding lines.
parser.add_argument("--once", help="Only run the loop once", action='store_const', const=True, default=False)
args = parser.parse_args()
reddit = praw.Reddit(args.user)
# Forward WARNING-and-above log records to discord.
discord_logging.init_discord_logging(args.user, logging.WARNING, 1)
counters.init(8004)  # metrics endpoint port
database.init()
log.info(f"Starting up: u/{args.user}")
while True:
    try:
        main(reddit)
    except Exception as err:
        # Report but never let an error kill the loop.
        utils.process_error(f"Error in main loop", err, traceback.format_exc())
    discord_logging.flush_discord()
    if args.once:
        # Single-pass mode: close the DB session before exiting the loop.
        database.session.close()
        break
    time.sleep(60)  # one-minute pause between iterations
# NOTE(review): tail of the bot's main loop — the matching `try:`, the
# enclosing `while`, and the definitions of `subreddit`, `comp_ow`,
# `bay_area`, `last_backup`, `loop_time`, and `args` are all outside the
# visible source, so the dangling `except` and `break` below cannot be
# closed within this chunk.
shared.check_flair_changes(subreddit, database)
shared.backfill_karma(subreddit, database)
for subreddit in [comp_ow, bay_area]:
    shared.count_queues(subreddit)
for subreddit in [comp_ow]:
    shared.ping_queues(subreddit, database)
# Back up the database at most once every 24 hours; a backup failure is
# reported but does not abort the loop iteration.
if last_backup is None or last_backup < datetime.utcnow() - timedelta(hours=24):
    try:
        log.info("Backing up database")
        database.backup()
        last_backup = datetime.utcnow()
    except Exception as err:
        utils.process_error(f"Error backing up database", err, traceback.format_exc())
except Exception as err:
    utils.process_error(f"Hit an error in main loop", err, traceback.format_exc())
# Record how long this loop iteration took.
delta_time = time.perf_counter() - loop_time
counters.loop_time.observe(round(delta_time, 2))
log.debug("Run complete after: %d", int(delta_time))
database.session.commit()
discord_logging.flush_discord()
if args.once:
    break
time.sleep(1 * 60)
def error(self, cmd, msg):
	"""Report *msg* for *cmd* through the audit client and return the result."""
	audit_result = process_error(self._audit, cmd, msg)
	return audit_result
def error(self, cmd, msg):
	"""Report *msg* for *cmd* through the audit client, then hand back a
	failed Response carrying the same message."""
	process_error(self._audit, cmd, msg)
	failure = Response(Success=False, Message=msg)
	return failure