def to_dict(self):
    """Serialise this reminder to a JSON-compatible dict (timestamps as UTC floats)."""
    serialised = {}
    serialised['user_id'] = self.user_id
    serialised['timestamp'] = utctimestamp(self.timestamp)
    serialised['remind_time'] = utctimestamp(self.remind_time)
    serialised['message'] = self.message
    return serialised
async def get_submission(self, reddit_id: str) -> reddit.models.Submission:
    """
    Get the submission from cache or from the reddit API (if not in cache or expired).

    :param reddit_id: ID of the reddit submission to retrieve.
    :return: The submission object (possibly a cached instance).
    :raise reddit.DeletedError: submission is removed/deleted
    """
    try:
        # Cache hit: refresh the cached object's data if it has expired.
        s, load_time = self.submission_cache[reddit_id]
        if utctimestamp(datetime.utcnow()) - load_time > self.cache_expiry_delta:
            await s.load()
            self.submission_cache[reddit_id] = (s, utctimestamp(datetime.utcnow()))
    except KeyError:
        # Cache miss: fetch from the reddit API and store with the load time.
        s = await self.reddit.submission(reddit_id)
        self.submission_cache[reddit_id] = (s, utctimestamp(datetime.utcnow()))
    # Detect removed/deleted submissions: removed_by_category is non-None for
    # removed posts; is_robot_indexable is falsy for deleted posts. getattr
    # defaults guard against either attribute being absent on the object.
    if getattr(s, 'removed_by_category', None) is not None:
        raise reddit.DeletedError(s, s.removed_by_category)
    elif not getattr(s, 'is_robot_indexable', True):
        raise reddit.DeletedError(s, 'unknown')
    return s
async def queue_add(self, ctx, *, daterange: str):
    """
    [MOD ONLY] Add a spotlight application scheduled for a given date range. The
    currently selected spotlight will be added. Use `.spotlight select` or
    `.spotlight roll` to change the currently selected spotlight.

    NOTE: KazTron will not take any action on the scheduled date. It is purely
    informational, intended for the bot operator, as well as determining the order of
    the queue.

    TIP: You can add the same Spotlight application to the queue multiple times (e.g.
    on different dates). To edit the date instead, use `.spotlight queue edit`.

    Arguments:
    * `daterange`: Required, string. A string in the form "date1 to date2". Each date
      can be in one of these formats:
        * An exact date: "2017-12-25", "25 December 2017", "December 25, 2017"
        * A partial date: "April 23"
        * A time expression: "tomorrow", "next week", "in 5 days". Does **not**
          accept days of the week ("next Tuesday").

    Examples:
    * `.spotlight queue add 2018-01-25 to 2018-01-26`
    * `.spotlight queue add april 3 to april 5`
    """
    logger.debug("queue add: {}".format(message_log_str(ctx.message)))
    self._load_applications()
    # Validate and parse the date range before touching the queue.
    try:
        dates = parse_daterange(daterange)
    except ValueError as e:
        raise commands.BadArgument(e.args[0]) from e
    try:
        app = await self._get_current()
    except IndexError:
        return  # already handled by _get_current
    # Queue items store the application's array index and UTC timestamps.
    queue_item = {
        'index': self.current_app_index,
        'start': utctimestamp(dates[0]),
        'end': utctimestamp(dates[1])
    }
    self.queue_data.append(queue_item)
    logger.info("queue add: added #{:d} from current select at {} to {}"
                .format(self.current_app_index + 1, dates[0].isoformat(' '),
                        dates[1].isoformat(' ')))
    self.sort_queue()
    queue_index = self.queue_data.index(queue_item)  # find the new position now
    self._write_db()
    start, end = self.format_date_range(dates[0], dates[1])
    # Report the user-facing (1-based) queue position and application number.
    await self.bot.say(self.QUEUE_CHANGED_FMT.format(
        msg=self.QUEUE_ADD_HEADING, i=queue_index+1, id=queue_item['index'] + 1,
        start=start, end=end, app=app.discord_str()
    ))
def to_dict(self):
    """Serialise this reminder to a JSON-compatible dict (timestamps as UTC floats)."""
    renew_dict = self.renew_data.to_dict() if self.renew_data else None
    return {
        'user_id': self.user_id,
        'channel_id': self.channel_id,
        'timestamp': utctimestamp(self.timestamp),
        'remind_time': utctimestamp(self.remind_time),
        'renew': renew_dict,
        'pin': self.pin,
        'message': self.message,
    }
async def task_check_reddit(self):
    """
    Checks all subreddits configured.

    Queues any submissions newer than the stored ``last_checked`` checkpoint,
    advances the checkpoint, and then posts queued submissions to channels.
    """
    sub_set = self._get_all_subreddits()
    if not sub_set:
        return  # none configured
    logger.debug("Checking for new posts in subreddits: {}".format(
        ', '.join(sub_set)))
    with self.cog_state as state:
        count = 0
        last_checked = utctimestamp(state.last_checked)
        last_timestamp = last_checked  # last processed submission timestamp
        async for submission in self.stream_manager.stream():
            # if an old submission / already checked, skip it
            if self.stream_manager.is_fresh and submission.created_utc <= last_checked:
                continue
            self.queue_manager.add(submission)
            last_timestamp = submission.created_utc
            logger.debug("Found post: {}".format(
                self.log_submission(submission)))
            count += 1
        logger.info("Found {} new posts in subreddits: {}".format(
            count, ', '.join(sub_set)))
        # issue #339: if an older post is un-removed and detected, last_timestamp can
        # be older than last_checked; writing it back would rewind the checkpoint and
        # cause posts after that older post to be re-posted. Only move it forward.
        if last_timestamp > last_checked:
            state.last_checked = datetime.utcfromtimestamp(last_timestamp)
    await self._post_all_channels()
async def stream(self) -> AsyncGenerator[reddit.models.Submission, None]:
    """
    Generator of new reddit posts. Should be async iterated.

    After a :meth:`~.refresh()`, setting :attr:`~.subreddits`, or hitting the
    :attr:`~.renewal_threshold`, this will load a number of recent posts instead of
    restarting from the latest post.
    """
    has_results = False
    if self.is_fresh:
        # (Re)create the underlying praw stream over the combined multireddit.
        sr = await self.reddit.subreddit(
            display_name='+'.join(self.subreddits))
        self._stream = sr.stream.submissions(pause_after=0)
    async for submission in self._stream:
        # With pause_after=0, the praw stream yields None once it runs out of new
        # submissions; use that as the end-of-batch marker.
        if submission is None:
            if has_results:
                self.no_result_count = 0
            else:
                # Count consecutive empty batches to trigger a stream renewal below.
                self.no_result_count += 1
            break
        has_results = True
        # Cache each seen submission with its load time (shared submission cache).
        self.submission_cache[submission.id] = (submission, utctimestamp(
            datetime.utcnow()))
        yield submission
    self._is_fresh = False
    # Too many consecutive empty batches: recreate the stream on the next call.
    if self.no_result_count >= self.renewal_threshold:
        self.refresh()
def capture_timed_event_end(self, timestamp: datetime, type_: EventType,
                            user: discord.Member, channel: discord.Channel):
    """
    Record the end of a timed event and accumulate its duration (in whole
    seconds, rounded) into the event's total. A no-op if no matching start was
    captured for this (type, user, channel).
    """
    key = self._make_tuple(type_, user, channel)
    try:
        start_time = self.start_times.pop(key)
    except KeyError:
        return  # no matching start event: nothing to accumulate
    duration = int(utctimestamp(timestamp) - start_time + 0.5)
    if key in self.data:
        self.data[key] += duration
    else:
        self.data[key] = duration
def loop2timestamp(loop_time: float, loop: asyncio.AbstractEventLoop = None) -> float:
    """
    Convert an event-loop clock value to a POSIX UTC timestamp.

    :param loop_time: A time in the event loop's clock (as from ``loop.time()``).
    :param loop: Event loop whose clock to use; defaults to the current loop.
    :return: The corresponding UTC timestamp.
    """
    active_loop = loop if loop is not None else asyncio.get_event_loop()
    offset = loop_time - active_loop.time()
    return utctimestamp(datetime.utcnow()) + offset
async def task_check_reddit(self):
    """
    Checks all subreddits configured.

    Queues any submissions newer than the stored ``last_checked`` checkpoint,
    advances the checkpoint (forward only), then posts queued submissions.
    """
    sub_set = self._get_all_subreddits()
    if not sub_set:
        return  # none configured
    # cog_state as a context manager — presumably persists state on exit; verify.
    with self.cog_state as state:
        count = 0
        last_checked = utctimestamp(state.last_checked)
        last_timestamp = last_checked  # last processed submission timestamp
        async for submission in self.stream_manager.stream():
            # if an old submission / already checked, skip it
            if self.stream_manager.is_fresh and submission.created_utc <= last_checked:
                continue
            self.queue_manager.add(submission)
            last_timestamp = submission.created_utc
            logger.debug("Found post: {}".format(
                self.log_submission(submission)))
            count += 1
        # Log at INFO only when there is something to report; DEBUG otherwise.
        if count > 0:
            logger.info("Found {} posts in subreddits: {}".format(
                count, ', '.join(sub_set)))
        else:
            logger.debug("Found 0 posts in subreddits: {}".format(
                ', '.join(sub_set)))
        # issue #339: if an older post is un-removed and detected, we want to avoid
        # re-posting posts that came after that older post
        if last_timestamp > last_checked:
            state.last_checked = datetime.utcfromtimestamp(last_timestamp)
    await self._post_all_channels()
def to_dict(self):
    """Serialise these stats to a JSON-compatible dict ('since' as a UTC float)."""
    state = {"words": self.words}
    state["rate_acc"] = self.rate_acc.dump_state()
    state["time"] = self.time
    state["wins"] = self.wins
    state["since"] = utctimestamp(self.since)
    return state
def to_dict(self):
    """Serialise to a JSON-compatible dict; 'limit_time' is None when unset."""
    limit_ts = utctimestamp(self.limit_time) if self.limit_time else None
    return {
        'interval': self.interval.total_seconds(),
        'limit': self.limit,
        'limit_time': limit_ts,
    }
def to_dict(self):
    """Serialise to a JSON-compatible dict. Unset datetimes serialise as 0."""
    def ts_or_zero(dt):
        # Falsy (None/unset) datetimes are stored as 0, matching the original format.
        return utctimestamp(dt) if dt else 0

    return {
        'founder': self.founder.id if self.founder else None,
        'members': [u.id for u in self.members],
        'start': copy.deepcopy(self.start),
        'end': copy.deepcopy(self.end),
        'finalized': list(self.finalized),
        'start_time': ts_or_zero(self.start_time),
        'end_time': ts_or_zero(self.end_time),
        'warn_times': [utctimestamp(t) for t in self.warn_times],
        'finalize_time': ts_or_zero(self.finalize_time),
    }
def to_dict(self):
    """ Convert this object to a dict suitable for JSON serialisation. """
    ser_data = {}
    for key, value in self.data.items():
        ser_fn = self.serializers.get(key, None)
        # Apply the field's serializer only when one is registered and the
        # value is non-None; otherwise store the raw value.
        if ser_fn and value is not None:
            ser_data[key] = ser_fn(value)
        else:
            ser_data[key] = value
    return {
        'user_id': self.user_id,
        'timestamp': utctimestamp(self.timestamp),
        'data': ser_data,
    }
def to_dict(self):
    """
    Serialise to a JSON-compatible dict. Tuple keys (EventType, user, channel)
    are flattened into 4-element rows; the salt is base64-encoded.
    """
    def flatten(mapping):
        # (enum, a, b) keys become (enum.name, a, b, value) rows.
        return [(k[0].name, k[1], k[2], v) for k, v in mapping.items()]

    return {
        'data': flatten(self.data),
        'start_time_data': flatten(self.start_times),
        'period': utctimestamp(self.period),
        'salt': binascii.b2a_base64(self.salt).decode(),
        'hash_name': self.hash_name,
        'hash_iters': self.hash_iters,
    }
async def queue_showcase(self, ctx, *, month: NaturalDateConverter=None):
    """
    [MOD ONLY] Lists a month's queue in the showcase format.

    Arguments:
    * month: Optional. Specify the month to list applications for. Default: next month.

    Examples:
        .spotlight q s 2018-03
        .spotlight q s March 2018
    """
    logger.debug("queue showcase: {}".format(message_log_str(ctx.message)))
    self._load_applications()
    logger.info("Listing showcase queue for {0.author!s} in {0.channel!s}".format(ctx.message))
    month = month  # type: datetime
    # figure out month start/end times: default is next month; an explicit month
    # is truncated to its first instant
    if not month:
        month = get_month_offset(datetime.utcnow(), 1)
    else:
        month = truncate(month, 'month')
    month_end = get_month_offset(month, 1)
    month_ts, month_end_ts = utctimestamp(month), utctimestamp(month_end)
    app_strings = self._get_queue_list(showcase=True)
    # filter by month: keep queue items whose start falls in [month, month_end)
    # NOTE(review): assumes app_strings is parallel to self.queue_data — confirm
    filt_app_strings = []
    for queue_item, app_string in zip(self.queue_data, app_strings):
        if month_ts <= queue_item['start'] < month_end_ts:
            filt_app_strings.append(app_string)
    if filt_app_strings:
        app_list_string = format_list(filt_app_strings)
    else:
        app_list_string = 'Empty'
    await self.bot.say('{}\n```{}```'.format(self.QUEUE_HEADING, app_list_string))
def __init__(self, bot):
    """
    Set up the Subwatch cog: register config/state defaults and initialise
    the reddit/stream/queue manager slots (populated later, not here).
    """
    super().__init__(bot, 'subwatch', SubwatchConfig, SubwatchState)
    config_defaults = dict(
        reddit_username=None,
        check_interval=60,
        min_post_interval=300,
        max_posts_per_interval=2,
    )
    self.cog_config.set_defaults(**config_defaults)
    state_defaults = dict(
        channels=dict(),
        last_checked=utctimestamp(datetime.utcnow()),
        no_results_count=0,
    )
    self.cog_state.set_defaults(**state_defaults)
    # These are filled in later (outside __init__), once connections are ready.
    self.reddit = None  # type: reddit.Reddit
    self.stream_manager = None  # type: RedditStreamManager
    self.queue_manager = None  # type: QueueManager
async def do_monthly_tasks(self):
    """
    Run anonymisation and report generation for every fully-elapsed month that
    has not yet been processed, then persist the new last-report checkpoint.
    """
    # Most recent fully-elapsed month relative to the accumulator's period.
    last_month = utils.datetime.get_month_offset(self.acc.period, -1)
    if last_month > self.last_report_dt:
        logger.debug("monthly tasks: last month {}, last report {}".format(
            last_month.isoformat(' '), self.last_report_dt.isoformat(' ')))
        # Do all months since the last month processed
        month = utils.datetime.get_month_offset(self.last_report_dt, 1)
        while month <= last_month:
            logger.info("Doing monthly tasks for {}".format(
                month.strftime('%B %Y')))
            # Anonymise before generating the report for each month.
            await self.anonymize_monthly_data(month)
            await self.generate_monthly_report(month)
            month = utils.datetime.get_month_offset(month, 1)
        # Only advance and persist the checkpoint after all months succeed.
        self.last_report_dt = last_month
        self.state.set('userstats', 'last_report', utctimestamp(self.last_report_dt))
        self.state.write()
async def queue_edit(self, ctx, queue_index: int, *, daterange: str):
    """
    [MOD ONLY] Change the scheduled date of a spotlight application in the queue.

    This command takes a QUEUE INDEX, not by spotlight number. Check the index with
    `.spotlight queue list`.

    Note: KazTron will not take any action on the scheduled date. It is purely
    informational, intended for the bot operator, as well as determining the order of
    the queue.

    Arguments:
    * `<queue_index>`: Required, int. The numerical position in the queue, as shown
      with `.spotlight queue list`.
    * `<daterange>`: Required, string. A string in the form "date1 to date2". Each
      date can be in one of these formats:
        * An exact date: "2017-12-25", "25 December 2017", "December 25, 2017"
        * A partial date: "April 23"
        * A time expression: "tomorrow", "next week", "in 5 days". Does **not**
          accept days of the week ("next Tuesday").

    Examples:
        `.spotlight queue edit 3 april 3 to april 6`
    """
    logger.debug("queue edit: {}".format(message_log_str(ctx.message)))
    self._load_applications()
    # Retrieve the queue item
    if queue_index is not None:
        # Convert the 1-based user-facing index to a 0-based array index.
        queue_array_index = queue_index - 1
        if not (0 <= queue_array_index < len(self.queue_data)):
            raise commands.BadArgument(
                ("{0:d} is not a valid queue index! "
                 "Currently valid values are 1 to {1:d} inclusive.")
                .format(queue_index, len(self.queue_data)))
    else:
        queue_array_index = -1  # last item
    queue_item = self.queue_data[queue_array_index]
    array_index = queue_item['index']
    list_index = array_index + 1  # user-facing
    # parse the daterange
    try:
        dates = parse_daterange(daterange)
    except ValueError as e:
        raise commands.BadArgument(e.args[0]) from e
    # Make the changes
    queue_item['start'] = utctimestamp(dates[0])  # same mutable object as in queue_data
    queue_item['end'] = utctimestamp(dates[1])  # same mutable object as in queue_data
    self.sort_queue()
    # Queue may have been reordered; re-locate the item for the user-facing position.
    new_queue_index = self.queue_data.index(queue_item) + 1
    # Prepare the output
    try:  # don't use _get_app - don't want errmsgs
        app_str = self.applications[array_index].discord_str()
    except IndexError:
        app_str = self.UNKNOWN_APP_STR
    start, end = self.format_date_range(dates[0], dates[1])
    logger.info("queue edit: changed item {:d} to dates {} to {}"
                .format(queue_index, dates[0].isoformat(' '),
                        dates[1].isoformat(' ')))
    self._write_db()
    await self.bot.say(self.QUEUE_CHANGED_FMT.format(
        msg=self.QUEUE_EDIT_HEADING, i=new_queue_index, id=list_index,
        start=start, end=end, app=app_str
    ))
def datetime2loop(dt: datetime, loop: asyncio.AbstractEventLoop = None) -> float:
    """
    Convert a datetime to the corresponding event-loop clock value.

    :param dt: The datetime to convert.
    :param loop: Event loop whose clock to use; defaults to the current loop.
    :return: The event-loop time corresponding to ``dt``.
    """
    posix_ts = utctimestamp(dt)
    return timestamp2loop(posix_ts, loop)
def capture_timed_event_start(self, timestamp: datetime, type_: EventType,
                              user: discord.Member, channel: discord.Channel):
    """Record the start time (as a UTC timestamp) of a timed event, keyed by
    (type, user, channel). Overwrites any previous start for the same key."""
    event_key = self._make_tuple(type_, user, channel)
    self.start_times[event_key] = utctimestamp(timestamp)
def _loop_time(self, timestamp: float) -> float:
    """
    Translate a POSIX UTC timestamp into this scheduler's clock (as reported
    by ``self.time_callback``), by offsetting from the current instant.
    """
    delta = timestamp - utctimestamp(datetime.utcnow())
    return self.time_callback() + delta
def to_dict(self):
    """Serialise to a JSON-compatible dict ('last_posted' as a UTC float)."""
    serialised = {}
    serialised['subreddits'] = self.subreddits
    serialised['queue'] = self.queue
    serialised['last_posted'] = utctimestamp(self.last_posted)
    return serialised
def to_dict(self):
    """Serialise this record to a JSON-compatible dict ('timestamp' as a UTC float)."""
    record = {}
    record['user_id'] = self.user_id
    record['timestamp'] = utctimestamp(self.timestamp)
    record['data'] = self.data
    return record