async def do_prestige(self, bot, kept_exp):
    """
    Reset a player's data, keeping their ID and the fields listed in PRESTIGE_SAVED_FIELDS.
    """
    self.prestige += 1
    self.experience = kept_exp

    meta = self._meta
    reset_fields_names = meta.fields.copy() - self.PRESTIGE_SAVED_FIELDS

    # Set (almost) everything back to default
    for reset_fields_name in reset_fields_names:
        field_object = meta.fields_map[reset_fields_name]
        default = field_object.default

        if callable(field_object.default):
            setattr(self, reset_fields_name, default())
        else:
            setattr(self, reset_fields_name, default)

    # Fix auto_now_add fields since they don't get back to defaults
    self.prestige_last_daily = timezone.now() - datetime.timedelta(days=1)
    self.last_giveback = timezone.now()

    level_info = self.level_info()

    self.magazines = level_info["magazines"]
    self.bullets = level_info["bullets"]

    await self.change_roles(bot)
def _create_token(
    self,
    type_token: str,
    exp_time: Optional[int],
    *,
    custom_claims,
) -> str:
    # Data section
    claims = {
        "iat": timezone.now(),
        "nbf": timezone.now(),
        "jti": str(uuid.uuid4()),
        "type": type_token,
        **custom_claims,
    }
    if exp_time:
        claims["exp"] = exp_time
    if self.issuer:
        claims["iss"] = self.issuer
    if self.audience:
        claims["aud"] = self.audience

    key = JsonWebKey.import_key(self.private_key)
    headers = {"alg": "RS256", "kid": key.thumbprint()}
    return jwt.encode(header=headers, payload=claims, key=key)
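# Hedged companion sketch (not part of the original code base): decoding and
# validating a token produced by _create_token above. It assumes the matching
# public key is available as `public_key` and that authlib is the JWT library
# in use, which matches the JsonWebKey / jwt.encode calls above.
from authlib.jose import JsonWebKey, jwt


def decode_token(token: str, public_key: str) -> dict:
    key = JsonWebKey.import_key(public_key)
    claims = jwt.decode(token, key)
    claims.validate()  # checks exp / nbf / iat against the current time
    return dict(claims)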
def _get_expired_time(
    type_token: str,
    identifier_type=None,
    expires_time: Optional[Union[timedelta, bool]] = None,
) -> Optional[datetime]:
    """
    If expires_time is False, the token never expires.
    If it is True or not given, fall back to the configured defaults.
    If a specific expires_time is given, use that value.
    """
    if expires_time is False:
        return None
    if expires_time is True or expires_time is None:
        if identifier_type == "APP_KEY":
            ret = settings.jwt_access_token_expires_app_key
        else:
            if type_token == "access":
                ret = settings.jwt_access_token_expires
            elif type_token == "refresh":
                ret = settings.jwt_refresh_token_expires
            else:
                raise NotImplementedError
    else:
        ret = expires_time
    return timezone.now() + ret
async def create_connection(self, with_db: bool) -> None:
    if charset_by_name(self.charset) is None:  # type: ignore
        raise DBConnectionError(f"Unknown charset {self.charset}")
    self._template = {
        "host": self.host,
        "port": self.port,
        "user": self.user,
        "db": self.database if with_db else None,
        "autocommit": True,
        "charset": self.charset,
        "minsize": self.pool_minsize,
        "maxsize": self.pool_maxsize,
        **self.extra,
    }
    try:
        self._pool = await aiomysql.create_pool(password=self.password, **self._template)

        if isinstance(self._pool, aiomysql.Pool):
            async with self.acquire_connection() as connection:
                async with connection.cursor() as cursor:
                    if self.storage_engine:
                        await cursor.execute(
                            f"SET default_storage_engine='{self.storage_engine}';"
                        )
                        if self.storage_engine.lower() != "innodb":  # pragma: nobranch
                            self.capabilities.__dict__["supports_transactions"] = False

                    hours = timezone.now().utcoffset().seconds / 3600  # type: ignore
                    tz = "{:+d}:{:02d}".format(int(hours), int((hours % 1) * 60))
                    await cursor.execute(f"SET SESSION time_zone='{tz}';")

        self.log.debug("Created connection %s pool with params: %s", self._pool, self._template)
    except pymysql.err.OperationalError:
        raise DBConnectionError(f"Can't connect to MySQL server: {self._template}")
def to_db_value(
    self,
    value: Optional[Union[datetime.time, datetime.timedelta]],
    instance: "Union[Type[Model], Model]",
) -> Optional[Union[datetime.time, datetime.timedelta]]:
    # Only do this if it is a Model instance, not class. Test for guaranteed instance var
    if hasattr(instance, "_saved_in_db") and (
        self.auto_now
        or (self.auto_now_add and getattr(instance, self.model_field_name) is None)
    ):
        now = timezone.now().time()
        setattr(instance, self.model_field_name, now)
        return now
    if value is not None:
        if isinstance(value, datetime.timedelta):
            return value
        if get_use_tz():
            if timezone.is_naive(value):
                warnings.warn(
                    "TimeField %s received a naive time (%s)"
                    " while time zone support is active." % (self.model_field_name, value),
                    RuntimeWarning,
                )
                value = value.replace(tzinfo=get_default_timezone())
    self.validate(value)
    return value
def to_db_value(
    self, value: Optional[datetime.datetime], instance: "Union[Type[Model], Model]"
) -> Optional[datetime.datetime]:
    # Only do this if it is a Model instance, not class. Test for guaranteed instance var
    if hasattr(instance, "_saved_in_db") and (
        self.auto_now
        or (self.auto_now_add and getattr(instance, self.model_field_name) is None)
    ):
        value = timezone.now()
        setattr(instance, self.model_field_name, value)
        return value
    if value is not None:
        if get_use_tz():
            if timezone.is_naive(value):
                warnings.warn(
                    "DateTimeField %s received a naive datetime (%s)"
                    " while time zone support is active." % (self.model_field_name, value),
                    RuntimeWarning,
                )
                value = timezone.make_aware(value, "UTC")
    self.validate(value)
    return value
async def get_workers(request: Request, rearq: ReArq = Depends(get_rearq)):
    redis = rearq.redis
    workers_info = await redis.hgetall(constants.WORKER_KEY)
    workers = []
    for worker_name, value in workers_info.items():
        job_stat = (
            await JobResult.filter(worker=worker_name)
            .annotate(count=Count("job_id"))
            .group_by("job__status")
            .values("count", status="job__status")
        )
        item = {
            "name": worker_name,
            "job_stat": {job["status"]: job["count"] for job in job_stat},
        }
        item.update(json.loads(value))
        time = ms_to_datetime(item["ms"])
        item["time"] = time
        item["is_offline"] = (
            timezone.now() - time
        ).seconds > constants.WORKER_HEARTBEAT_SECONDS + 10
        workers.append(item)
    return templates.TemplateResponse(
        "worker.html",
        {"request": request, "page_title": "worker", "workers": workers},
    )
async def test_values_list(self):
    now = timezone.now()
    obj0 = await testmodels.DatetimeFields.create(datetime=now)
    values = await testmodels.DatetimeFields.get(id=obj0.id).values_list(
        "datetime", flat=True
    )
    self.assertEqual(values, now)
async def test_default_timezone(self):
    now = timezone.now()
    obj = await testmodels.DatetimeFields.create(datetime=now)
    self.assertEqual(obj.datetime.tzinfo.zone, "UTC")
    obj_get = await testmodels.DatetimeFields.get(pk=obj.pk)
    self.assertEqual(obj_get.datetime.tzinfo.zone, "UTC")
    self.assertEqual(obj_get.datetime, now)
async def reply_parser(client, oid, reply_type):
    if cache := await reply_cache.get_or_none(
        None,
        query := Q(Q(oid=oid), Q(reply_type=reply_type)),
        Q(created__gte=timezone.now() - reply_cache.timeout),
    ):
        logger.info(f"拉取评论缓存: {cache.created}")
        r = cache.content
async def delay(
    self,
    args: Optional[Tuple[Any, ...]] = None,
    kwargs: Optional[Dict[str, Any]] = None,
    job_id: Optional[str] = None,
    countdown: Union[float, datetime.timedelta] = 0,
    eta: Optional[datetime.datetime] = None,
    expire: Optional[Union[float, datetime.datetime]] = None,
    job_retry: int = 0,
    job_retry_after: int = 60,
) -> Job:
    if not job_id:
        job_id = uuid4().hex
    if countdown:
        defer_ts = to_ms_timestamp(countdown)
    elif eta:
        defer_ts = to_ms_timestamp(eta)
    else:
        defer_ts = timestamp_ms_now()
    expire_time = None
    expires = expire or self.expire
    if expires:
        expire_time = ms_to_datetime(to_ms_timestamp(expires))
    job = await Job.get_or_none(job_id=job_id)
    if job:
        logger.warning(f"Job {job_id} exists")
        return job
    job = Job(
        task=self.function.__name__,
        args=args,
        kwargs=kwargs,
        job_retry=job_retry or self.job_retry,
        queue=self.queue,
        job_id=job_id,
        expire_time=expire_time,
        enqueue_time=timezone.now(),
        job_retry_after=job_retry_after,
    )
    if not eta and not countdown:
        job.status = JobStatus.queued
        await job.save()
        await self.rearq.redis.xadd(self.queue, {"job_id": job_id})
    else:
        job.status = JobStatus.deferred
        await job.save()
        await self.rearq.redis.zadd(DELAY_QUEUE, defer_ts, f"{self.queue}:{job_id}")
    return job
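# Hedged usage sketch (task and function names here are illustrative, not from
# the original code base): enqueueing a task through Task.delay above, once
# immediately and once deferred by a countdown. It assumes `rearq` is a
# configured ReArq instance and that tasks are registered with the
# @rearq.task() decorator; the exact decorator signature may differ.
@rearq.task()
async def send_report(user_id: int) -> str:
    return f"report sent to {user_id}"


async def enqueue_examples():
    # queued right away: pushed onto the stream for a worker to pick up
    job = await send_report.delay(kwargs={"user_id": 42})
    # deferred: parked in the delay queue and promoted after 60 seconds
    deferred_job = await send_report.delay(kwargs={"user_id": 42}, countdown=60)
    return job, deferred_job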
async def test_create(self):
    now = timezone.now().timetz()
    obj0 = await testmodels.TimeFields.create(time=now)
    obj1 = await testmodels.TimeFields.get(id=obj0.id)
    self.assertEqual(
        obj1.time,
        timedelta(
            hours=now.hour,
            minutes=now.minute,
            seconds=now.second,
            microseconds=now.microsecond,
        ),
    )
async def test_get(self):
    now = timezone.now().timetz()
    await testmodels.TimeFields.create(time=now)
    obj = await testmodels.TimeFields.get(time=now)
    self.assertEqual(
        obj.time,
        timedelta(
            hours=now.hour,
            minutes=now.minute,
            seconds=now.second,
            microseconds=now.microsecond,
        ),
    )
async def test_values(self):
    now = timezone.now().timetz()
    obj0 = await testmodels.TimeFields.create(time=now)
    values = await testmodels.TimeFields.get(id=obj0.id).values("time")
    self.assertEqual(
        values["time"],
        timedelta(
            hours=now.hour,
            minutes=now.minute,
            seconds=now.second,
            microseconds=now.microsecond,
        ),
    )
async def test_count(self):
    now = timezone.now()
    obj = await testmodels.DatetimeFields.create(datetime=now)
    self.assertEqual(
        await testmodels.DatetimeFields.filter(datetime=obj.datetime).count(), 1
    )
    self.assertEqual(
        await testmodels.DatetimeFields.filter(datetime_auto=obj.datetime_auto).count(), 1
    )
    self.assertEqual(
        await testmodels.DatetimeFields.filter(datetime_add=obj.datetime_add).count(), 1
    )
async def _handle_stopping_sales(self) -> None:
    """Handle stopping sales for started lotteries if strike date is close enough"""
    started_lotteries = await Lottery.filter(status=LotteryStatus.STARTED)
    stop_sales_for_lotteries_ids = []
    for lottery in started_lotteries:
        total_seconds_to_lottery = (lottery.strike_date_eta - timezone.now()).total_seconds()
        if total_seconds_to_lottery < STOP_SALES_BEFORE_START_IN_SEC:
            # change lottery status to LotteryStatus.STOP_SALES
            stop_sales_for_lotteries_ids.append(lottery.id)
    if stop_sales_for_lotteries_ids:
        # bulk change lotteries to LotteryStatus.STOP_SALES
        await Lottery.filter(id__in=stop_sales_for_lotteries_ids).update(
            status=LotteryStatus.STOP_SALES
        )
        logging.debug(f":::lottery_cron: Stopped selling tickets for: {stop_sales_for_lotteries_ids}")
    return None
async def _pre_run(self):
    async with await self._lock_manager.lock(constants.WORKER_KEY_LOCK):
        workers_info = await self._redis.hgetall(constants.WORKER_KEY)
        for worker_name, value in workers_info.items():
            value = json.loads(value)
            time = ms_to_datetime(value["ms"])
            is_offline = (
                timezone.now() - time
            ).seconds > constants.WORKER_HEARTBEAT_SECONDS + 10
            if value.get("is_timer") and not is_offline:
                msg = f"There is a timer worker `{worker_name}` already, you can only start one timer worker"
                logger.error(msg)
                raise UsageError(msg)
        else:
            await self._push_heartbeat()
async def test_create(self):
    now = timezone.now()
    obj0 = await testmodels.DatetimeFields.create(datetime=now)
    obj = await testmodels.DatetimeFields.get(id=obj0.id)
    self.assertEqual(obj.datetime, now)
    self.assertEqual(obj.datetime_null, None)
    self.assertLess(obj.datetime_auto - now, timedelta(microseconds=20000))
    self.assertLess(obj.datetime_add - now, timedelta(microseconds=20000))
    datetime_auto = obj.datetime_auto
    sleep(0.012)
    await obj.save()
    obj2 = await testmodels.DatetimeFields.get(id=obj.id)
    self.assertEqual(obj2.datetime, now)
    self.assertEqual(obj2.datetime_null, None)
    self.assertEqual(obj2.datetime_auto, obj.datetime_auto)
    self.assertNotEqual(obj2.datetime_auto, datetime_auto)
    self.assertGreater(obj2.datetime_auto - now, timedelta(microseconds=10000))
    self.assertLess(obj2.datetime_auto - now, timedelta(seconds=1))
    self.assertEqual(obj2.datetime_add, obj.datetime_add)
async def cleanup():
    """
    Remove users that haven't been updated in a while, to save disk space
    for servers with large numbers of users.
    """
    expire_parts = get_settings().get_expire_time()
    now = timezone.now()
    if expire_parts[1] == "d":
        duration = timedelta(days=expire_parts[0])
    elif expire_parts[1] == "w":
        duration = timedelta(weeks=expire_parts[0])
    elif expire_parts[1] == "m":
        # approximate one month as 30 days
        duration = timedelta(days=expire_parts[0] * 30)
    elif expire_parts[1] == "y":
        duration = timedelta(days=expire_parts[0] * 365)
    else:
        raise ValueError("unknown expires type")
    expire_dt = now - duration
    await UserModel.filter(updated_at__lt=expire_dt).delete()
async def run_cron(self):
    """
    run cron task
    :return:
    """
    redis = self._redis
    cron_tasks = CronTask.get_cron_tasks()
    p = redis.pipeline()
    execute = False
    jobs = []
    for function, task in cron_tasks.items():
        if timestamp_ms_now() >= task.next_run:
            execute = True
            job_id = uuid4().hex
            if task.function == check_pending_msgs:
                asyncio.ensure_future(
                    check_pending_msgs(task, task.queue, self.group_name, self.job_timeout)
                )
            else:
                logger.info(f"{task.function.__name__}()")
                jobs.append(
                    Job(
                        task=function,
                        job_retry=self.job_retry,
                        queue=task.queue,
                        job_id=job_id,
                        enqueue_time=timezone.now(),
                        job_retry_after=self.job_retry_after,
                        status=JobStatus.queued,
                    )
                )
                p.xadd(task.queue, {"job_id": job_id})
                self.jobs_complete += 1
            task.set_next()
    if jobs:
        await Job.bulk_create(jobs)
    if execute:
        await p.execute()
async def _run_at_start(self):
    jobs = []
    p = self._redis.pipeline()
    for function, task in CronTask.get_cron_tasks().items():
        if task.run_at_start:
            logger.info(f"{function}() <- run at start")
            job_id = uuid4().hex
            jobs.append(
                Job(
                    task=function,
                    job_retry=self.job_retry,
                    queue=task.queue,
                    job_id=job_id,
                    enqueue_time=timezone.now(),
                    job_retry_after=self.job_retry_after,
                    status=JobStatus.queued,
                )
            )
            p.xadd(task.queue, {"job_id": job_id})
            self.jobs_complete += 1
    if jobs:
        await Job.bulk_create(jobs)
        await p.execute()
async def test_get(self):
    now = timezone.now().timetz()
    await testmodels.TimeFields.create(time=now)
    obj = await testmodels.TimeFields.get(time=now)
    self.assertEqual(obj.time, now)
async def run_job(self, queue: str, msg_id: str, job: Job):
    # a job is expired when its expire_time is already in the past
    if job.expire_time and job.expire_time < timezone.now():
        logger.warning(f"job {job.job_id} is expired, ignore")
        job.status = JobStatus.expired
        await job.save(update_fields=["status"])
        return
    job_id = job.job_id
    job_result = JobResult(
        msg_id=msg_id, job=job, worker=self.worker_name, start_time=timezone.now()
    )
    task = self._task_map.get(job.task)
    if not task:
        logger.warning(f"job {job_id}, task {job.task} not found")
        job_result.result = "task not found"
        await job_result.save()
        return job_result
    ref = f"{job_id}:{job.task}"
    start_ms = timestamp_ms_now()
    logger.info(
        "%6.2fs → %s(%s)%s"
        % (
            (start_ms - to_ms_timestamp(job.enqueue_time)) / 1000,
            ref,
            args_to_string(job.args, job.kwargs),
            f" try={job.job_retries}" if job.job_retries > 1 else "",
        )
    )
    try:
        async with async_timeout.timeout(self.job_timeout):
            if task.bind:
                result = await task.function(task, *(job.args or []), **(job.kwargs or {}))
            else:
                result = await task.function(*(job.args or []), **(job.kwargs or {}))
        job_result.success = True
        job_result.finish_time = timezone.now()
        job.status = JobStatus.success
        logger.info("%6.2fs ← %s ● %s" % ((timestamp_ms_now() - start_ms) / 1000, ref, result))
        self.jobs_complete += 1
    except Exception as e:
        job_result.finish_time = timezone.now()
        self.jobs_failed += 1
        result = f"Run task error in NO.{job.job_retries} times, exc: {e}, retry after {self.job_retry_after} seconds"
        logger.error("%6.2fs ← %s ● %s" % ((timestamp_ms_now() - start_ms) / 1000, ref, result))
        if job.job_retries >= job.job_retry:
            t = (timestamp_ms_now() - to_ms_timestamp(job.enqueue_time)) / 1000
            logger.error("%6.2fs ! %s max retries %d exceeded" % (t, ref, job.job_retry))
            job.status = JobStatus.failed
        else:
            job.status = JobStatus.deferred
            job.job_retries = F("job_retries") + 1
            await self.rearq.zadd(to_ms_timestamp(self.job_retry_after), f"{queue}:{job_id}")
    finally:
        await self._xack(queue, msg_id)
        await job.save(update_fields=["status", "job_retries"])
    job_result.result = result
    await job_result.save()
    return job_result
    else:
        await reply_cache(oid=oid, reply_type=reply_type, content=r).save()
    return r


@safe_parser
async def dynamic_parser(client: httpx.AsyncClient, url: str):
    if not (match := re.search(r"[th]\.bilibili\.com[\/\w]*\/(\d+)", url)):
        raise ParserException("动态链接错误", url, match)
    f = dynamic(url)
    query = (
        Q(rid=match.group(1))
        if "type=2" in match.group(0)
        else Q(dynamic_id=match.group(1))
    )
    if cache := await dynamic_cache.get_or_none(
        None,
        query,
        Q(created__gte=timezone.now() - dynamic_cache.timeout),
    ):
        logger.info(f"拉取动态缓存: {cache.created}")
        f.detailcontent = cache.content
    else:
        r = await client.get(
            "https://api.vc.bilibili.com/dynamic_svr/v1/dynamic_svr/get_dynamic_detail",
            params={"rid": match.group(1), "type": 2}
            if "type=2" in match.group(0) or "h.bilibili.com" in match.group(0)
            else {"dynamic_id": match.group(1)},
        )
        f.detailcontent = r.json()
        if not f.detailcontent.get("data").get("card"):
            raise ParserException("动态解析错误", r.url, f.detailcontent)
async def test_create(self):
    now = timezone.now().timetz()
    obj0 = await testmodels.TimeFields.create(time=now)
    obj1 = await testmodels.TimeFields.get(id=obj0.id)
    self.assertEqual(obj1.time, now)
async def get_clan_members_inactive(self, clan_db, **kwargs):
    if not kwargs:
        kwargs = dict(days=30)
    return await ClanMember.filter(
        last_active__lt=timezone.now() - timedelta(**kwargs), clan=clan_db
    ).prefetch_related("member")
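# Hedged usage sketch: the **kwargs above are forwarded straight to timedelta,
# so any timedelta keyword can define the inactivity window. The method name
# below is illustrative; `clan_db` is assumed to come from the caller.
async def _example_inactivity_queries(self, clan_db):
    # members with no activity in the last 90 days
    inactive_90_days = await self.get_clan_members_inactive(clan_db, days=90)
    # members with no activity in the last two weeks
    inactive_two_weeks = await self.get_clan_members_inactive(clan_db, weeks=2)
    return inactive_90_days, inactive_two_weeks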
async def handle_ticket_opening(self, channel: discord.TextChannel, user: discord.User):
    db_user: DiscordUser = await get_from_db(user, as_user=True)
    ticket = await db_user.get_or_create_support_ticket()
    await ticket.save()

    await channel.send(
        content=f"Opening a DM channel with {user.name}#{user.discriminator} ({user.mention}).\n"
                f"Every message in here will get sent back to them if it's not a bot message, "
                f"DuckHunt command, and if it doesn't start with the > character.\n"
                f"You can use many commands in the DM channels, detailed in "
                f"`dh!help private_support`\n"
                f"• `dh!ps close` will close the channel, sending a DM to the user.\n"
                f"• `dh!ps tag tag_name` will send a tag to the user *and* in the channel. "
                f"The two are linked, so changing pages in this channel "
                f"will change the page in the DM too.\n"
                f"• `dh!ps block` will block the user from opening further channels.\n"
                f"• `dh!ps huh` should be used if the message is not a support request, "
                f"and will silently close the channel.\n"
                f"Attachments are supported in messages.\n\n"
                f"Thanks for helping with the bot DM support ! <3")

    players_data = await Player.all().filter(
        member__user=db_user
    ).order_by("-last_giveback").select_related("channel").limit(5)

    info_embed = discord.Embed(color=discord.Color.blurple(), title="Support information")

    info_embed.description = "Information in this box isn't meant to be shared outside of this channel, and is " \
                             "provided for support purposes only. \n" \
                             "Nothing was sent to the user about this."

    info_embed.set_author(name=f"{user.name}#{user.discriminator}", icon_url=str(user.avatar_url))
    info_embed.set_footer(text="Private statistics")

    ticket_count = await db_user.support_ticket_count()

    info_embed.add_field(name="User language", value=str(db_user.language), inline=True)

    if db_user.access_level_override != AccessLevel.DEFAULT:
        info_embed.add_field(name="Access Level", value=str(db_user.access_level_override.name), inline=True)

    fs_td = format_timedelta(db_user.first_seen - timezone.now(), granularity="minute",
                             add_direction=True, format="short", locale="en")

    info_embed.add_field(name="First seen", value=str(fs_td), inline=True)
    info_embed.add_field(name="Tickets created", value=str(ticket_count), inline=True)

    if ticket_count > 1:
        last_ticket = await SupportTicket.filter(
            user=db_user, closed=True
        ).order_by('-opened_at').select_related('closed_by').first()

        ftd = format_timedelta(last_ticket.closed_at - timezone.now(), granularity="minute",
                               add_direction=True, format="short", locale="en")

        value = f"Closed {ftd} by {last_ticket.closed_by.name}."
        if last_ticket.close_reason:
            value += f"\n{last_ticket.close_reason}"

        info_embed.add_field(name="Previous ticket", value=value, inline=False)

    for player_data in players_data:
        if player_data.channel.enabled:
            info_embed.add_field(
                name=f"#{player_data.channel} - {player_data.experience} exp",
                value=f"[Statistics](https://duckhunt.me/data/channels/{player_data.channel.discord_id}/{user.id})"
            )
        else:
            info_embed.add_field(
                name=f"#{player_data.channel} [DISABLED]",
                value=f"[Statistics](https://duckhunt.me/data/channels/{player_data.channel.discord_id}/{user.id}) - {player_data.experience} exp"
            )

    await channel.send(embed=info_embed)

    _ = get_translate_function(self.bot, db_user.language)

    welcome_embed = discord.Embed(color=discord.Color.green(), title="Support ticket opened")
    welcome_embed.description = \
        _("Welcome to DuckHunt private messages support.\n"
          "Messages here are relayed to a select group of volunteers and bot moderators to help you use the bot. "
          "For general questions, we also have a support server "
          "[here](https://duckhunt.me/support).\n"
          "If you opened the ticket by mistake, just say `close` and we'll close it for you, otherwise, we'll get "
          "back to you in a few minutes.")

    welcome_embed.set_footer(text=_("Support tickets are deleted after 24 hours of inactivity"))

    try:
        await user.send(embed=welcome_embed)
    except discord.Forbidden:
        await channel.send(
            content="❌ It seems I can't send messages to the user, you might want to close the DM. "
                    "`dh!ps close`.")
async def test_get_now(self):
    now = timezone.now()
    await testmodels.DatetimeFields.create(datetime=now)
    obj = await testmodels.DatetimeFields.get(datetime=now)
    self.assertEqual(obj.datetime, now)
async def test_cast(self):
    now = timezone.now()
    obj0 = await testmodels.DatetimeFields.create(datetime=now.isoformat())
    obj = await testmodels.DatetimeFields.get(id=obj0.id)
    self.assertEqual(obj.datetime, now)
async def test_values(self):
    now = timezone.now()
    obj0 = await testmodels.DatetimeFields.create(datetime=now)
    values = await testmodels.DatetimeFields.get(id=obj0.id).values("datetime")
    self.assertEqual(values["datetime"], now)