async def leaderboard(
    self,
    ctx,
    tag_condition: Optional[Literal["every", "any"]] = "every",
    season: Optional[int] = CURRENT_SEASON,
    *tags: str,
):
    """View the leaderboard for completed challenges.

    tag_condition decides whether to filter by challenges that have EVERY
    specified tag, or ANY of the specified tags, defaults to EVERY.

    Note that specifying no tags with "every" filters to all challenges,
    while no tags with "any" filters to zero challenges!
    """
    # Array containment ("every") vs. overlap ("any") selects the tag filter.
    tag_filter = (Challenge.tags.contains
                  if tag_condition == "every" else Challenge.tags.overlap)
    score = sa.func.sum(Challenge.points).label("score")
    count = sa.func.count(Challenge.points).label("count")
    # dense_rank so users with equal scores share the same rank.
    rank = sa.func.dense_rank().over(
        order_by=sa.desc("score")).label("rank")
    # Inner query: per-user total score and solve count for the season.
    q = (db.select([User.discord_id, score, count]).select_from(
        User.join(CompletedChallenge).join(Challenge)).where(
            tag_filter(tags)).where(
                CompletedChallenge.season == season).group_by(
                    User.discord_id).alias("inner"))
    # Outer query: attach the rank window and keep the top 10 by score.
    scores = await (db.select([q.c.discord_id, q.c.score, q.c.count,
                               rank]).select_from(q).order_by(
                                   sa.desc(q.c.score)).limit(10).gino.load(
                                       (
                                           q.c.discord_id,
                                           ColumnLoader(q.c.score),
                                           ColumnLoader(q.c.count),
                                           ColumnLoader(rank),
                                       )).all())
    # Resolve discord ids to user objects for display.
    scores_formatted = [[r, self.bot.get_user(uid), s, c]
                        for (uid, s, c, r) in scores]
    table = tabulate(
        scores_formatted,
        headers=["Rank", "Username", "Score", "Solved Challenges"],
        tablefmt="github",
    )
    await ctx.send(f"Scoreboard for season {season}: ```\n{table}\n```")
async def get_track(owner_id, track_id):
    """Fetch a single penguin track (with its like count) by owner and id."""
    like_count = db.func.count(TrackLike.track_id)
    condition = (PenguinTrack.owner_id == owner_id) & (PenguinTrack.id == track_id)
    query = (
        db.select([PenguinTrack, like_count])
        .select_from(PenguinTrack.outerjoin(TrackLike))
        .where(condition)
        .group_by(PenguinTrack.id)
    )
    loader = PenguinTrack.load(likes=ColumnLoader(like_count))
    return await query.gino.load(loader).first()
async def challenge_by_tag(request: HTTPConnection):
    """Render the challenge index filtered to challenges carrying a tag."""
    tag = request.path_params["tag"]
    solves = sa.func.count(CompletedChallenge.challenge_id).label("solves")
    select = [Challenge, solves]
    columns = (Challenge, ColumnLoader(solves))
    if request.user.is_authenticated:
        # CTE of challenge ids the current user has solved this season; used
        # below to add a per-row "solved" flag for logged-in users only.
        solved_challenges = (db.select([
            CompletedChallenge.challenge_id
        ]).where(CompletedChallenge.season == CURRENT_SEASON).where(
            CompletedChallenge.discord_id == request.user.discord_id).cte(
                "solved_challenges"))
        solved = (sa.exists().where(
            solved_challenges.c.challenge_id == Challenge.id).label("solved"))
        select.append(solved)
        columns = (*columns, ColumnLoader(solved))
    # Outer join so challenges with zero solves still appear with count 0.
    challenges = await (db.select(select).select_from(
        Challenge.outerjoin(
            CompletedChallenge,
            (Challenge.id == CompletedChallenge.challenge_id) &
            (CompletedChallenge.season == CURRENT_SEASON),
        )).group_by(Challenge.id).where(Challenge.tags.contains(
            [tag])).order_by(Challenge.creation_date.desc(),
                             Challenge.id.desc()).gino.load(columns).all())
    # did_solve is an empty list for anonymous users (no "solved" column was
    # selected), so `did_solve and did_solve[0]` is falsy in that case.
    rendered = [(
        w,
        length_constrained_plaintext_markdown(w.content),
        solves,
        did_solve and did_solve[0],
    ) for (w, solves, *did_solve) in challenges
                if not should_skip_challenge(w, request.user.is_admin)]
    return templates.TemplateResponse(
        "challenge/index.j2",
        {
            "request": request,
            "challenges": rendered,
        },
    )
async def handle_refresh_my_track_likes(p):
    """Push the current like count for each of p's tracks to p's client."""
    likes = db.func.count(TrackLike.track_id)
    # NOTE(review): only PenguinTrack.id is selected here, yet the loader
    # builds a PenguinTrack model; the sibling queries select the full
    # PenguinTrack. Only .id and .likes are read below, so a partial model
    # works — confirm this is intentional rather than an oversight.
    track_likes_query = db.select([PenguinTrack.id, likes])\
        .select_from(PenguinTrack.outerjoin(TrackLike))\
        .where(PenguinTrack.owner_id == p.id)\
        .group_by(PenguinTrack.id).gino.load(PenguinTrack.load(likes=ColumnLoader(likes)))
    async with db.transaction():
        async for track in track_likes_query.iterate():
            # The client-side track id is the DB id prefixed with '10000'.
            await p.send_xt('getlikecountfortrack', p.id, f'10000{track.id}', track.likes)
async def get_player_tracks(p):
    """Return the penguin's own tracks formatted for the client protocol."""
    like_count = db.func.count(TrackLike.track_id)
    query = (
        db.select([PenguinTrack, like_count])
        .select_from(PenguinTrack.outerjoin(TrackLike))
        .where(PenguinTrack.owner_id == p.id)
        .group_by(PenguinTrack.id)
        .gino.load(PenguinTrack.load(likes=ColumnLoader(like_count)))
    )
    formatted = []
    async with db.transaction():
        async for track in query.iterate():
            formatted.append(f'10000{track.id}|{track.name}|{int(track.sharing)}|{track.likes}')
    return formatted
async def get_tracks(self):
    """Rebuild the playlist from shared tracks owned by penguins in the room."""
    self.playlist = []
    like_count = db.func.count(TrackLike.track_id)
    owner_ids = tuple(self.room.penguins_by_id.keys())
    shared_by_present_owners = (
        (PenguinTrack.owner_id.in_(owner_ids)) & (PenguinTrack.sharing == True)
    )
    query = (
        db.select([PenguinTrack, like_count])
        .select_from(PenguinTrack.outerjoin(TrackLike))
        .where(shared_by_present_owners)
        .group_by(PenguinTrack.id)
        .gino.load(PenguinTrack.load(likes=ColumnLoader(like_count)))
    )
    async with db.transaction():
        async for track in query.iterate():
            self.playlist.append(track)
async def get_shared_tracks(p):
    """Return shared tracks of currently-online penguins, formatted for the client."""
    like_count = db.func.count(TrackLike.track_id)
    online_ids = tuple(p.server.penguins_by_id.keys())
    query = (
        db.select([PenguinTrack, like_count])
        .select_from(PenguinTrack.outerjoin(TrackLike))
        .where(PenguinTrack.owner_id.in_(online_ids) & (PenguinTrack.sharing == True))
        .group_by(PenguinTrack.id)
        .gino.load(PenguinTrack.load(likes=ColumnLoader(like_count)))
    )
    formatted = []
    async with db.transaction():
        async for track in query.iterate():
            penguin = p.server.penguins_by_id[track.owner_id]
            formatted.append(f'{penguin.id}|{penguin.safe_name}|10000{track.id}|{track.likes}')
    return formatted
async def info(
    self,
    ctx,
    season: Optional[int] = CURRENT_SEASON,
):
    """Get info about your solved and unsolved challenges."""
    # CTE of challenge ids the invoking user solved in the given season.
    solved_challenges = (db.select([
        CompletedChallenge.challenge_id
    ]).where(CompletedChallenge.discord_id == ctx.author.id).where(
        CompletedChallenge.season == season).cte("solved_challenges"))
    solved = (sa.exists().where(
        solved_challenges.c.challenge_id == Challenge.id).label("solved"))
    score = sa.func.sum(Challenge.points).label("score")
    # Per-user season score totals; reused both for the user's own score
    # and for computing their rank.
    scores_q = (db.select([User.discord_id, score]).select_from(
        User.outerjoin(CompletedChallenge).outerjoin(Challenge)).where(
            CompletedChallenge.season == season).group_by(
                User.discord_id).cte("scores"))
    # Scalar subquery: this user's score, defaulting to 0.
    my_score = (db.select([
        sa.func.coalesce(scores_q.c.score, 0)
    ]).select_from(scores_q).where(scores_q.c.discord_id == ctx.author.id))
    # Rank = number of distinct scores strictly above mine, plus one
    # (dense ranking, same as the leaderboard).
    rank_value = await (db.select([
        sa.func.count(sa.distinct(scores_q.c.score)) + 1
    ]).where(scores_q.c.score > my_score).gino.scalar())
    info = await (db.select([Challenge, solved
                             ]).where(sa.not_(Challenge.hidden)).gino.load(
                                 (Challenge, ColumnLoader(solved))).all())
    # split_on presumably partitions the (challenge, solved-flag) rows into
    # solved / unsolved challenge lists — verify against its definition.
    solved, unsolved = split_on(info)
    points = sum(c.points for c in solved)
    count = len(solved)
    solved_msg = ", ".join(c.title for c in solved) or "No solves"
    unsolved_msg = ", ".join(c.title for c in unsolved) or "All solved"
    msg = textwrap.dedent(f"""
    Challenge info for {ctx.author} in season {season}:
    Solves: {count}
    Points: {points}
    Rank: {rank_value}
    Solved: `{solved_msg}`
    Unsolved: `{unsolved_msg}`
    """)
    await ctx.send(msg)
async def get_tracks(self):
    """Rebuild the studio playlist and the track-id -> penguin mapping."""
    self.playlist = []
    self.penguins_by_track_id = {}
    studio = self.server.rooms[SoundStudio.StudioRoomId]
    # Snapshot the occupants so the mapping stays consistent while iterating.
    occupants = studio.penguins_by_id.copy()
    like_count = db.func.count(TrackLike.track_id)
    query = (
        db.select([PenguinTrack, like_count])
        .select_from(PenguinTrack.outerjoin(TrackLike))
        .where((PenguinTrack.owner_id.in_(tuple(occupants.keys())))
               & (PenguinTrack.sharing == True))
        .group_by(PenguinTrack.id)
        .gino.load(PenguinTrack.load(likes=ColumnLoader(like_count)))
    )
    async with db.transaction():
        async for track in query.iterate():
            self.playlist.append(track)
            self.penguins_by_track_id[track.id] = occupants[track.owner_id]
async def get(self):
    """Aggregate per-passenger ticket counts and total spend, returned as JSON."""
    ticket_count = db.func.count(Ticket.passenger_id)
    total_amount = db.func.sum(TicketFlight.amount)
    joined = Ticket.outerjoin(TicketFlight,
                              Ticket.ticket_no == TicketFlight.ticket_no)
    query = (
        db.select([Ticket.passenger_id, ticket_count, total_amount])
        .select_from(joined)
        .group_by(Ticket.passenger_id)
        .gino.load((
            Ticket.passenger_id,
            ColumnLoader(ticket_count),
            ColumnLoader(total_amount),
        ))
    )
    stats = Stat()
    async with db.transaction():
        async for passenger_id, times, amount in query.iterate():
            item = StatItem(passenger_id=passenger_id, times=times, amount=amount)
            stats.passengers.append(item)
    return json_response(data=Stat.Schema().dump(stats))
async def challenge_view(request: HTTPConnection):
    """Render a single challenge page, including its season solve count."""
    slug = request.path_params["slug"]
    solves = sa.func.count(CompletedChallenge.challenge_id).label("solves")
    # Outer join so a challenge with zero solves still yields a row.
    challenge = await (db.select([Challenge, solves]).select_from(
        Challenge.outerjoin(
            CompletedChallenge,
            (Challenge.id == CompletedChallenge.challenge_id) &
            (CompletedChallenge.season == CURRENT_SEASON),
        )).group_by(Challenge.id).where(Challenge.slug == slug).gino.load(
            (Challenge, ColumnLoader(solves))).first())
    if challenge is None:
        return abort(404, "Challenge not found")
    challenge, solves = challenge
    # Hidden/ineligible challenges 404 as if they did not exist.
    if should_skip_challenge(challenge, request.user.is_admin):
        return abort(404, "Challenge not found")
    if request.user.is_authenticated:
        # The user's completion record for this season, or None.
        solved_challenge = await CompletedChallenge.query.where(
            (CompletedChallenge.discord_id == request.user.discord_id) &
            (CompletedChallenge.challenge_id == challenge.id) &
            (CompletedChallenge.season == CURRENT_SEASON)).gino.first()
    else:
        solved_challenge = False
    rendered = highlight_markdown_unsafe(challenge.content)
    return templates.TemplateResponse(
        "challenge/view.j2",
        {
            "challenge": challenge,
            "request": request,
            "rendered": rendered,
            "solves": solves,
            "submit_form": AnswerForm(),
            "solved_challenge": solved_challenge,
        },
    )
async def stats(
    self,
    ctx,
    tag_condition: Optional[Literal["every", "any"]] = "every",
    season: Optional[int] = CURRENT_SEASON,
    *tags: str,
):
    """View the most and least solved challenges.

    tag_condition decides whether to filter by challenges that have EVERY
    specified tag, or ANY of the specified tags, defaults to EVERY.

    Note that specifying no tags with "every" filters to all challenges,
    while no tags with "any" filters to zero challenges!
    """
    tag_filter = (Challenge.tags.contains
                  if tag_condition == "every" else Challenge.tags.overlap)
    count = sa.func.count(CompletedChallenge.challenge_id).label("count")
    # NOTE(review): this window ranks by count DESC and is reused by the
    # least-solved query below, whose rows are ordered ASC — confirm the
    # rank column there is intended to count from the most-solved side.
    rank = sa.func.dense_rank().over(
        order_by=sa.desc("count")).label("rank")
    # Per-challenge solve counts for the season (most-solved table).
    q_most = (db.select([
        Challenge.title, Challenge.points, count, Challenge.hidden
    ]).select_from(Challenge.outerjoin(CompletedChallenge)).where(
        tag_filter(tags)).where(
            CompletedChallenge.season == season).group_by(
                Challenge.id).alias("inner"))
    # Hidden challenges are marked with an "X" in the output column.
    hidden_check_most = sa.case([(q_most.c.hidden, "X")],
                                else_="").label("hidden_check")
    most = await (db.select([
        rank, q_most.c.title, q_most.c.count, q_most.c.points,
        hidden_check_most
    ]).select_from(q_most).limit(5).order_by(sa.desc(
        q_most.c.count)).gino.load(
            (rank, q_most.c.title, ColumnLoader(q_most.c.count),
             q_most.c.points, hidden_check_most)).all())
    # Same aggregation, ordered ascending for the least-solved table.
    q_least = (db.select([
        Challenge.title, Challenge.points, count, Challenge.hidden
    ]).select_from(Challenge.outerjoin(CompletedChallenge)).where(
        tag_filter(tags)).where(
            CompletedChallenge.season == season).group_by(
                Challenge.id).alias("inner"))
    hidden_check_least = sa.case([(q_least.c.hidden, "X")],
                                 else_="").label("hidden_check")
    least = await (db.select([
        rank, q_least.c.title, q_least.c.count, q_least.c.points,
        hidden_check_least
    ]).select_from(q_least).order_by(sa.asc(
        q_least.c.count)).limit(5).gino.load(
            (rank, q_least.c.title, ColumnLoader(q_least.c.count),
             q_least.c.points, hidden_check_least)).all())
    most_table = tabulate(
        most,
        headers=["Rank", "Challenge Title", "Solves", "Points", "Hidden"],
        tablefmt="github",
    )
    least_table = tabulate(
        least,
        headers=["Rank", "Challenge Title", "Solves", "Points", "Hidden"],
        tablefmt="github",
    )
    msg = textwrap.dedent("""
    ```md
    # Most Solved Challenges in season {season}
    {most_table}
    # Least Solved Challenges in season {season}
    {least_table}
    ```
    """).format(most_table=most_table,
                least_table=least_table,
                season=season)
    await ctx.send(msg)
async def challenge_submit_answer(request: HTTPConnection):
    """Validate a submitted answer and record the solve on success.

    A correct, non-deprecated, first-time answer creates a CompletedChallenge
    row and redirects back to the challenge page; any failure re-renders the
    page with the form errors attached.
    """
    id = request.path_params["id"]
    form = await request.form()
    form = AnswerForm(form)
    is_valid = form.validate()
    answer = form.answer.data
    solves = sa.func.count(CompletedChallenge.challenge_id).label("solves")
    # Outer join so a challenge with zero solves still yields a row.
    challenge = await (db.select([Challenge, solves]).select_from(
        Challenge.outerjoin(
            CompletedChallenge,
            (Challenge.id == CompletedChallenge.challenge_id) &
            (CompletedChallenge.season == CURRENT_SEASON),
        )).group_by(Challenge.id).where(Challenge.id == id).gino.load(
            (Challenge, ColumnLoader(solves))).first())
    if challenge is None:
        return abort(404, "Challenge not found")
    challenge, solves = challenge
    if should_skip_challenge(challenge, request.user.is_admin):
        return abort(404, "Challenge not found")
    if (challenge.answer or challenge.flag) != answer:
        is_valid = False
        form.answer.errors.append("Incorrect answer.")
    # TODO? change this to a flash message
    elif challenge.depreciated:
        # FIX: previously this check ran unconditionally, so a *wrong* answer
        # on a deprecated challenge showed both "Incorrect answer." and
        # "Correct, but ..." — the deprecation message now only fires when
        # the answer actually matched.
        is_valid = False
        form.answer.errors.append(
            "Correct, but this challenge is depreciated, sorry.")
    already_claimed = await CompletedChallenge.query.where(
        (CompletedChallenge.discord_id == request.user.discord_id) &
        (CompletedChallenge.challenge_id == challenge.id) &
        (CompletedChallenge.season == CURRENT_SEASON)).gino.first()
    if already_claimed is not None:  # shouldn't see the form anyway
        is_valid = False
        form.answer.errors.append("You've already solved this challenge.")
    if is_valid:
        await CompletedChallenge.create(
            discord_id=request.user.discord_id,
            challenge_id=challenge.id,
            season=CURRENT_SEASON,
        )
        return redirect_response(
            url=request.url_for("challenge_view", slug=challenge.slug))
    rendered = highlight_markdown_unsafe(challenge.content)
    # NOTE(review): unlike challenge_view, this context omits
    # "solved_challenge" — confirm the template tolerates the missing key.
    return templates.TemplateResponse(
        "challenge/view.j2",
        {
            "challenge": challenge,
            "request": request,
            "rendered": rendered,
            "solves": solves,
            "submit_form": form,
        },
    )
async def get_top_donaters(cls) -> List[Tuple[str, Decimal]]:
    """Return the 20 biggest donaters as (nickname, total donated) pairs."""
    total = db.func.sum(cls.sum)
    query = (
        db.select([cls.nickname, total])
        .group_by(cls.nickname)
        .order_by(total.desc())
        .limit(20)
    )
    return await query.gino.load((cls.nickname, ColumnLoader(total))).all()
class AintQConsumer:
    """Pulls tasks off the AintQ queue with a pool of worker coroutines.

    Workers sleep on a semaphore and are woken either by a PostgreSQL
    NOTIFY event ('aintq_enqueue') or by a timer for deferred tasks.
    """

    delay = db.func.extract(
        'EPOCH',
        Task.schedule - db.func.timezone('UTC', db.func.now())).label('delay')
    get_query = db.select([
        Task,
        Task.ctid,
        delay,
    ]).order_by(Task.schedule.nullsfirst(), ).limit(1, ).with_for_update(
        skip_locked=True, ).execution_options(loader=Task.load(
            ctid=ColumnLoader(Task.ctid),
            delay=ColumnLoader(delay),
        ))
    """
    The dequeue query, using ctid to identify the task within the same
    transaction, and central-database-calculated delay time to schedule
    deferred tasks.
    """

    def __init__(self, aintq, size=8, loop=None):
        if loop is None:
            loop = asyncio.get_event_loop()
        self.aintq = aintq
        self._loop = loop
        self._semaphore = asyncio.Semaphore(loop=loop)
        """
        This semaphore is used to wake consumer worker coroutines.
        """
        self._maxsize = size
        """
        Control concurrent consumer workers.
        """
        self._free = self._size = 0
        """
        Internal counters presenting idle and alive workers in real time.
        """
        self._running = True
        """
        Used for graceful shutdown.
        """
        self._ticker = None
        """
        A asyncio timer handle for scheduled deferred tasks. All workers
        share one ticker.
        """
        self._next_tick = 0
        """
        The ticker only fires once for the most recent task.
        """
        self._break = True
        """
        Used for a racing scenario when an event of new task arrives while
        the last busy worker is retrieving new tasks right after the last
        task was completed.
        """
        self._events = asyncio.Queue(loop=loop)
        """
        Used to turn new task event callbacks into coroutine.
        """

    async def worker(self):
        """The outer worker coroutine.

        A worker is always in idle unless explicitly triggered by
        ``semaphore.release()``. Then the worker will run until there is no
        further tasks to immediately execute in the queue.
        """
        self._size += 1
        self._free += 1
        logger = logging.getLogger('aintq.worker-{}'.format(self._size))
        logger.info('Worker started.')
        try:
            while True:
                # Sleep until woken by an event, a tick, or shutdown.
                await self._semaphore.acquire()
                if not self._running or self._size > self._maxsize:
                    raise _WorkerExit()
                logger.debug('awaken')
                self._free -= 1
                try:
                    while True:
                        # Each dequeue+execute cycle gets a fresh transaction.
                        async with db.transaction(reuse=False):
                            if await self._work(logger):
                                break
                        if not self._running or self._size > self._maxsize:
                            raise _WorkerExit()
                        # Periodic maintenance: roughly every 256 tasks.
                        if (await db.scalar(Task.seq.next_value())) % 256 == 0:
                            await db.status('VACUUM ANALYZE aintq.tasks')
                except Exception:
                    traceback.print_exc()
                    # TODO: robust worker doesn't die, retry
                    pass
                finally:
                    self._free += 1
                    logger.debug('sleep')
        except _WorkerExit:
            pass
        finally:
            self._size -= 1
            self._free -= 1

    async def _work(self, logger):
        """The inner worker code within a transaction.

        Dequeue and locking the task must be step one to avoid deadlocks.
        Returning ``True`` will put the worker into sleep, indicating the
        end of queue is reached, or next task is scheduled to run in the
        future. Before executing a task, another worker is always awaken if
        there is any sleeping, so that the second task in the queue could
        be executed in time. The actual task code is executed within a
        savepoint subtransaction, thus it may fail and cause the
        subtransaction rolled back. The outer transaction is always
        committed with the task row deleted for performance.
        """
        # Always break, unless stopped by a racing new task event
        self._break = True
        # The dequeue
        task = await self.get_query.gino.first()
        # End of queue?
        if task is None:
            logger.debug('end of queue')
            return self._break
        # Task is not due?
        if task.delay and task.delay > 0:
            logger.debug('no due')
            # Calculate next_tick in loop time
            next_tick = self._loop.time() + task.delay
            # Keep only the most recent tick
            if next_tick < self._next_tick and self._ticker is not None:
                ticker, self._ticker = self._ticker, None
                ticker.cancel()
            # Schedule the ticker
            if self._ticker is None:
                logger.debug('set tick %r', task.delay)
                self._next_tick = next_tick
                self._ticker = self._loop.call_at(next_tick, self._tick)
            return self._break
        # Got a task, wake up another sleeping worker if any
        logger.debug('got a task %r', task.ctid)
        if self._free:
            self._semaphore.release()
        # Create a savepoint for actual task
        async with db.transaction() as tx:
            try:
                result = await task.run(self.aintq)
            except Exception:
                traceback.print_exc()
                tx.raise_rollback()
            else:
                logger.critical('got result: %r', result)
        # Always delete the task row after execution regardless of success or
        # failure, in order to avoid duplicate execution
        await Task.delete.where(Task.ctid == task.ctid).gino.status()

    def _tick(self):
        """Scheduled trigger."""
        self._ticker = None
        self._wake_up_one()

    def _wake_up_one(self):
        """Wake up one worker, or stop racing workers from falling asleep."""
        if self._free:
            self._semaphore.release()
        else:
            self._break = False

    async def run(self):
        """Main coroutine."""
        # Make sure the schema is present
        await db.gino.create_all()
        for sql in Task.trigger:
            try:
                await db.status(sql)
            except DuplicateObjectError:
                # Trigger already exists; fine.
                pass
            except Exception:
                traceback.print_exc()
                pass
        # Start workers
        for _ in range(self._maxsize - self._size):
            asyncio.create_task(self.worker())
        # Listen to new task events
        async with db.acquire() as conn:
            await conn.raw_connection.add_listener(
                'aintq_enqueue', lambda *x: self._events.put_nowait(x[3]))
            while True:
                await self._events.get()
                if not self._running:
                    break
                self._wake_up_one()