async def _upload_worker(client, message, reply, torrent_info, user_id, send_as_zip):
    """Upload every file of a completed aria2 download to Telegram.

    Args:
        client: Pyrogram client used for parsing and uploads.
        message: The command message the file list replies to.
        reply: The bot's status message; edited as the job progresses.
        torrent_info: aria2 ``tellStatus``-style dict (``files``, ``dir``,
            optionally ``bittorrent``).
        user_id: Download owner id; also the name of the working directory.
        send_as_zip: When true, pack everything into one zip before upload.
    """
    files = dict()
    sent_files = []
    with tempfile.TemporaryDirectory(dir=str(user_id)) as zip_tempdir:
        if send_as_zip:
            # Name the zip after the torrent when metadata is present; plain
            # (non-torrent) downloads have no 'bittorrent' key, so fall back
            # to the first file's basename instead of raising KeyError.
            if torrent_info.get('bittorrent'):
                filename = torrent_info['bittorrent']['info']['name']
            else:
                filename = os.path.basename(torrent_info['files'][0]['path'])
            # Keep the name within filesystem limits: 251 chars + '.zip' = 255.
            filename = filename[-251:] + '.zip'
            filepath = os.path.join(zip_tempdir, filename)

            def _zip_files():
                # Runs in a thread executor so zipping doesn't block the loop.
                with zipfile.ZipFile(filepath, 'x') as zipf:
                    for file in torrent_info['files']:
                        # Store entries relative to the download directory.
                        zipf.write(
                            file['path'],
                            file['path'].replace(
                                os.path.join(torrent_info['dir'], ''), '', 1))

            await asyncio.gather(
                reply.edit_text('Download successful, zipping files...'),
                client.loop.run_in_executor(None, _zip_files))
            asyncio.create_task(
                reply.edit_text('Download successful, uploading files...'))
            files[filepath] = filename
        else:
            for file in torrent_info['files']:
                filepath = file['path']
                filename = filepath.replace(
                    os.path.join(torrent_info['dir'], ''), '', 1)
                files[filepath] = filename
        # Upload in natural sort order while the temp dir is still alive.
        for filepath in natsorted(files):
            sent_files.extend(await _upload_file(client, message, reply,
                                                 files[filepath], filepath))
    text = 'Files:\n'
    parser = pyrogram_html.HTML(client)
    quote = None
    first_index = None
    all_amount = 1
    for filename, filelink in sent_files:
        atext = f'- <a href="{filelink}">{html.escape(filename)}</a>\n'
        futtext = text + atext
        # Flush the list message before exceeding Telegram's limits
        # (100 entries per message / 4096 parsed characters).
        if all_amount > 100 or len(
                (await parser.parse(futtext))['message']) > 4096:
            thing = await message.reply_text(text, quote=quote)
            if first_index is None:
                first_index = thing
            quote = False
            futtext = atext
            all_amount = 1
            await asyncio.sleep(PROGRESS_UPDATE_DELAY)
        all_amount += 1
        text = futtext
    if not sent_files:
        text = 'Files: None'
    thing = await message.reply_text(text, quote=quote)
    if first_index is None:
        first_index = thing
    asyncio.create_task(
        reply.edit_text(
            f'Download successful, files uploaded.\nFiles: {first_index.link}'))
async def list_leeches(client, message):
    """Reply with every active aria2 download: bold name plus its GID."""
    user_id = message.from_user.id
    parser = pyrogram_html.HTML(client)
    quote = None
    text = ''
    for download in await aria2_tell_active(session):
        torrent_meta = download.get('bittorrent')
        if torrent_meta:
            info = torrent_meta.get('info')
            # Metadata-less magnet download: nothing useful to show yet.
            if not info:
                continue
            display_name = info['name']
        else:
            display_name = os.path.basename(download['files'][0]['path'])
            if not display_name:
                # Fall back to the filename embedded in the source URI.
                source_uri = download['files'][0]['uris'][0]['uri']
                display_name = urldecode(
                    os.path.basename(urlparse(source_uri).path))
        entry = (f'<b>{html.escape(display_name)}</b>\n'
                 f'<code>{download["gid"]}</code>\n\n')
        candidate = text + entry
        # Start a new message once the parsed text would overflow 4096 chars.
        if len((await parser.parse(candidate))['message']) > 4096:
            await message.reply_text(text, quote=quote)
            quote = False
            candidate = entry
        text = candidate
    await message.reply_text(text or 'No leeches found.', quote=quote)
async def anilist_query(client, inline_query):
    """Answer an AniList inline search with a paged set of photo results.

    The first regex group selects character vs media search; the second is
    the query text. API responses are cached in ``all_anilists`` keyed by
    ``(character, query)``; access is serialized by ``anilists_lock``.
    """
    # Non-whitelisted users get a dummy answer, cached for an hour.
    if inline_query.from_user.id not in app_user_ids:
        await inline_query.answer([
            InlineQueryResultArticle('...no', InputTextMessageContent('...no'))
        ], cache_time=3600, is_personal=True)
        return
    character = bool(inline_query.matches[0].group(1))
    query = inline_query.matches[0].group(2).strip().lower()
    async with anilists_lock:
        if (character, query) not in all_anilists:
            # Cache miss: query the AniList GraphQL API.
            async with session.post(
                    'https://graphql.anilist.co',
                    data=json.dumps({
                        'query': CHARACTER_QUERY if character else MEDIA_QUERY,
                        'variables': {
                            'search': query
                        }
                    }),
                    headers={
                        'Content-Type': 'application/json',
                        'Accept': 'application/json'
                    }) as resp:
                all_anilists[(character, query)] = (await resp.json(
                ))['data']['Page']['characters' if character else 'media']
        anilists = all_anilists[(character, query)]
    answers = []
    parser = pyrogram_html.HTML(client)
    for a, anilist in enumerate(anilists):
        text, image = await (generate_character
                             if character else generate_media)(anilist)
        # Back / position / Next pager buttons for this result.
        buttons = [
            InlineKeyboardButton('Back', 'anilist_back'),
            InlineKeyboardButton(f'{a + 1}/{len(anilists)}', 'anilist_nop'),
            InlineKeyboardButton('Next', 'anilist_next')
        ]
        if not a:
            # First result: nothing to go back to.
            buttons.pop(0)
        if len(anilists) == a + 1:
            # Last result: nothing to go forward to.
            buttons.pop()
        # The first line becomes the article title, the rest the description.
        split = text.split('\n', 1)
        title = (await parser.parse(split[0]))['message']
        try:
            description = (await parser.parse(split[1]))['message']
        except IndexError:
            # Single-line caption: no description.
            description = None
        answers.append(
            InlineQueryResultPhoto(image,
                                   title=title,
                                   description=description,
                                   caption=text,
                                   reply_markup=InlineKeyboardMarkup([buttons
                                                                      ]),
                                   id=f'anilist{a}-{time.time()}'))
    await inline_query.answer(answers, is_personal=True, is_gallery=False)
async def main_help(client, inline_query):
    """Answer the inline help query with one article per plugin plus a menu.

    Non-whitelisted users get a dummy ("...no") article cached for an hour.
    """
    if inline_query.from_user.id not in app_user_ids:
        await inline_query.answer([
            InlineQueryResultArticle('...no', InputTextMessageContent('...no'))
        ], cache_time=3600, is_personal=True)
        return
    buttons = []
    to_append = []
    prefix_list = config['config']['prefixes'] or []
    if not isinstance(prefix_list, list):
        prefix_list = prefix_list.split()
    # Fix: pick the first configured prefix BEFORE joining; the old code
    # indexed the joined string, yielding only its first character (wrong
    # for multi-character prefixes). Also guard the empty-config case.
    prefix = prefix_list[0] if prefix_list else ''
    prefixes = ', '.join(prefix_list)
    results = []
    parser = pyrogram_html.HTML(client)
    me = None
    for internal_name in sorted(help_dict):
        external_name, help_text = help_dict[internal_name]
        # Only hit get_me() when a help text actually needs the bot username.
        if '{bot}' in help_text:
            if not me:
                me = await client.get_me()
        text = f'Help for {html.escape(external_name)}:\nAvailable prefixes: {prefixes}\n\n{help_text.format(prefix=prefix, bot=getattr(me, "username", None))}'
        to_append.append(
            InlineKeyboardButton(external_name, f'help_m{internal_name}'))
        # Lay the menu out three buttons per row.
        if len(to_append) > 2:
            buttons.append(to_append)
            to_append = []
        results.append(
            InlineQueryResultArticle(
                external_name,
                InputTextMessageContent(text),
                reply_markup=InlineKeyboardMarkup(
                    [[InlineKeyboardButton('Back', 'help_back')]]),
                description=(await parser.parse(
                    help_text.format(prefix=prefix,
                                     bot=getattr(me, 'username',
                                                 None))))['message'],
                id=f'helpm{internal_name}-{time.time()}'))
    # Flush the final, possibly partial, button row.
    if to_append:
        buttons.append(to_append)
    results.insert(
        0,
        InlineQueryResultArticle(
            'Main Menu',
            InputTextMessageContent('Select the plugin you want help with'),
            reply_markup=InlineKeyboardMarkup(buttons),
            id=f'helpa-{time.time()}'))
    await inline_query.answer(results, is_personal=True)
async def return_search(query, page=1, sukebei=False):
    """Search the (sukebei.)meowinjapanese.cf RSS feed and return one page.

    Results are cached per normalized query for an hour. Returns a tuple
    ``(page_text, page_count, cache_timestamp)``; ``page_text`` is '' when
    the requested page is out of range.
    """
    page -= 1  # callers pass 1-based page numbers
    query = query.lower().strip()
    used_search_info = search_info[sukebei]
    async with search_lock:
        results, get_time = used_search_info.get(query, (None, 0))
        # Fix: default the returned timestamp to the cached one; it was
        # previously bound only on a cache miss, so a fresh cache hit
        # raised NameError at the return statement.
        ttl = get_time
        if (time.time() - get_time) > 3600:
            results = []
            async with session.get(
                f'https://{"sukebei." if sukebei else ""}meowinjapanese.cf/?page=rss&q={urlencode(query)}'
            ) as resp:
                d = feedparser.parse(await resp.text())
            text = ''
            a = 0
            parser = pyrogram_html.HTML(None)
            # Best-seeded entries first; stop at the first dead torrent.
            for i in sorted(d['entries'],
                            key=lambda i: int(i['nyaa_seeders']),
                            reverse=True):
                if i['nyaa_size'].startswith('0'):
                    continue
                if not int(i['nyaa_seeders']):
                    break
                link = i['link']
                splitted = urlsplit(link)
                if splitted.scheme == 'magnet' and splitted.query:
                    # Magnet URIs aren't clickable in Telegram; make them
                    # tap-to-copy instead.
                    link = f'<code>{link}</code>'
                newtext = f'''{a + 1}. {html.escape(i["title"])}
<b>Link:</b> {link}
<b>Size:</b> {i["nyaa_size"]}
<b>Seed:</b> {i["nyaa_seeders"]}
<b>Leech:</b> {i["nyaa_leechers"]}
<b>Category:</b> {i["nyaa_category"]}
<b>Mirror:</b> <code>/{BotCommands.MirrorCommand} {link}</code>
<b>Mirror Zip:</b> <code>/{BotCommands.TarMirrorCommand} {link}</code>
\n\n'''
                futtext = text + newtext
                # Start a new page every 10 entries or when the parsed text
                # would exceed Telegram's 4096-character message limit.
                if (a and not a % 10) or len(
                        (await parser.parse(futtext))['message']) > 4096:
                    results.append(text)
                    futtext = newtext
                text = futtext
                a += 1
            results.append(text)
            ttl = time.time()
            used_search_info[query] = results, ttl
    try:
        return results[page], len(results), ttl
    except IndexError:
        return '', len(results), ttl
async def return_search(query, page=1, sukebei=False):
    """Search the (sukebei.)nyaa.si RSS feed and return one page of results.

    Results are cached per normalized query for an hour. Returns a tuple
    ``(page_text, page_count, cache_timestamp)``; ``page_text`` is "" when
    the requested page is out of range.
    """
    page -= 1  # callers pass 1-based page numbers
    query = query.lower().strip()
    used_search_info = search_info[sukebei]
    async with search_lock:
        results, get_time = used_search_info.get(query, (None, 0))
        # Fix: default the returned timestamp to the cached one; it was
        # previously bound only on a cache miss, so a fresh cache hit
        # raised NameError at the return statement.
        ttl = get_time
        if (time.time() - get_time) > 3600:
            results = []
            async with session.get(
                f'https://{"sukebei." if sukebei else ""}nyaa.si/?page=rss&q={urlencode(query)}'
            ) as resp:
                d = feedparser.parse(await resp.text())
            text = ""
            a = 0
            parser = pyrogram_html.HTML(None)
            # Best-seeded entries first; stop at the first dead torrent.
            for i in sorted(d["entries"],
                            key=lambda i: int(i["nyaa_seeders"]),
                            reverse=True):
                if i["nyaa_size"].startswith("0"):
                    continue
                if not int(i["nyaa_seeders"]):
                    break
                link = i["link"]
                splitted = urlsplit(link)
                if splitted.scheme == "magnet" and splitted.query:
                    # Magnet URIs aren't clickable in Telegram; make them
                    # tap-to-copy instead.
                    link = f"<code>{link}</code>"
                newtext = f"""{a + 1}. {html.escape(i["title"])}
<b>Link:</b> {link}
<b>Size:</b> {i["nyaa_size"]}
<b>Seeders:</b> {i["nyaa_seeders"]}
<b>Leechers:</b> {i["nyaa_leechers"]}
<b>Category:</b> {i["nyaa_category"]}\n\n"""
                futtext = text + newtext
                # Start a new page every 10 entries or when the parsed text
                # would exceed Telegram's 4096-character message limit.
                if (a and not a % 10) or len(
                        (await parser.parse(futtext))["message"]) > 4096:
                    results.append(text)
                    futtext = newtext
                text = futtext
                a += 1
            results.append(text)
            ttl = time.time()
            used_search_info[query] = results, ttl
    try:
        return results[page], len(results), ttl
    except IndexError:
        return "", len(results), ttl
async def generate_character(anilist):
    """Build the HTML caption and image URL for an AniList character entry.

    Returns a ``(text, image_url)`` tuple; when a description is present it
    is truncated so the parsed caption stays within Telegram's
    1024-character caption limit.
    """
    name = anilist['name']
    native_name = name['native']
    alt_names = ', '.join(name['alternative'])
    bio = (anilist['description'] or '').strip()
    text = f'<a href="{anilist["siteUrl"]}">{name["full"]}</a>'
    if native_name:
        text += f' ({native_name})'
    if alt_names:
        text += f'\n<b>Synonyms:</b> {alt_names}'
    if bio:
        text += '\n'
        # Measure the already-rendered portion, then trim the bio so the
        # total parsed caption cannot exceed 1024 characters.
        rendered = await pyrogram_html.HTML(None).parse(text)
        used = len(rendered['message'])
        if len(bio) > 1023 - used:
            bio = bio[:1022 - used] + '…'
        text += bio
    return text, anilist['image']['large']
async def list_leeches(client, message):
    """List active aria2 downloads visible to the sender.

    Admin chats see every active download; other users only see downloads
    whose GID they own.
    """
    user_id = message.from_user.id
    text = ''
    quote = None
    parser = pyrogram_html.HTML(client)
    for i in await aria2_tell_active(session):
        if message.chat.id in ADMIN_CHATS or is_gid_owner(user_id, i['gid']):
            # Fix: non-torrent (direct-URL) downloads carry no 'bittorrent'
            # key; the old code raised KeyError on them. Name those after
            # their first file instead, matching the unfiltered lister.
            if i.get('bittorrent'):
                info = i['bittorrent'].get('info')
                # Metadata-less magnet download: nothing useful to show yet.
                if not info:
                    continue
                name = info['name']
            else:
                name = os.path.basename(i['files'][0]['path'])
            a = f'''<b>{html.escape(name)}</b>
<code>{i['gid']}</code>\n\n'''
            futtext = text + a
            # Split into a new message before exceeding the 4096-char limit.
            if len((await parser.parse(futtext))['message']) > 4096:
                await message.reply_text(text, quote=quote)
                quote = False
                futtext = a
            text = futtext
    if not text:
        text = 'No leeches by you found.'
    await message.reply_text(text, quote=quote)
async def _upload_worker(client, message, reply, torrent_info, user_id, flags):
    """Upload every file of a completed aria2 download to Telegram.

    Args:
        client: Pyrogram client used for parsing and uploads.
        message: The command message the file list replies to.
        reply: The bot's status message; edited as the job progresses.
        torrent_info: aria2 ``tellStatus``-style dict (``files``, ``dir``,
            optionally ``bittorrent``).
        user_id: Download owner id; also the name of the working directory.
        flags: Collection of option flags; ``SendAsZipFlag`` and
            ``ForceDocumentFlag`` membership is checked here.
    """
    files = dict()
    sent_files = []
    with tempfile.TemporaryDirectory(dir=str(user_id)) as zip_tempdir:
        if SendAsZipFlag in flags:
            # Name the zip after the torrent when metadata is present,
            # otherwise after the first file of the download.
            if torrent_info.get('bittorrent'):
                filename = torrent_info['bittorrent']['info']['name']
            else:
                filename = os.path.basename(torrent_info['files'][0]['path'])
            # Keep the name within filesystem limits: 251 chars + '.zip' = 255.
            filename = filename[-251:] + '.zip'
            filepath = os.path.join(zip_tempdir, filename)

            def _zip_files():
                # Runs in a thread executor so zipping doesn't block the loop.
                with zipfile.ZipFile(filepath, 'x') as zipf:
                    for file in torrent_info['files']:
                        # Store entries relative to the download directory.
                        filename = file['path'].replace(
                            os.path.join(torrent_info['dir'], ''), '', 1)
                        # Optionally drop BitTorrent padding files.
                        if IGNORE_PADDING_FILE and re.match(
                                r'(?i)^_+padding_file', filename) is not None:
                            continue
                        zipf.write(file['path'], filename)

            await asyncio.gather(
                reply.edit_text('Download successful, zipping files...'),
                client.loop.run_in_executor(None, _zip_files))
            asyncio.create_task(
                reply.edit_text('Download successful, uploading files...'))
            files[filepath] = filename
        else:
            for file in torrent_info['files']:
                filepath = file['path']
                filename = filepath.replace(
                    os.path.join(torrent_info['dir'], ''), '', 1)
                # Optionally drop BitTorrent padding files.
                if IGNORE_PADDING_FILE and re.match(r'(?i)^_+padding_file',
                                                    filename):
                    continue
                if LICHER_PARSE_EPISODE:
                    # Strip bracketed release tags and the 3-char extension;
                    # fall back to the original name if nothing remains.
                    filename = re.sub(
                        r'\s*(?:\[.+?\]|\(.+?\))\s*|\.[a-z][a-z0-9]{2}$', '',
                        os.path.basename(filepath)).strip() or filename
                files[filepath] = filename
        # Upload in natural sort order while the temp dir is still alive.
        for filepath in natsorted(files):
            sent_files.extend(await _upload_file(client, message, reply,
                                                 files[filepath], filepath,
                                                 ForceDocumentFlag in flags))
    text = '✨ <b>#Files:</b> \n\n'
    parser = pyrogram_html.HTML(client)
    quote = None
    first_index = None
    all_amount = 1
    for filename, filelink in sent_files:
        # Zero-byte files are reported without a link.
        if filelink:
            atext = f'- <a href="{filelink}">{html.escape(filename)}</a>'
        else:
            atext = f'- {html.escape(filename)} (empty)'
        atext += '\n'
        futtext = text + atext
        # Flush the list message before exceeding Telegram's limits
        # (100 entries per message / 4096 parsed characters).
        if all_amount > 100 or len(
                (await parser.parse(futtext))['message']) > 4096:
            thing = await message.reply_text(text, quote=quote,
                                             disable_web_page_preview=True)
            if first_index is None:
                first_index = thing
            quote = False
            futtext = atext
            all_amount = 1
            await asyncio.sleep(PROGRESS_UPDATE_DELAY)
        all_amount += 1
        text = futtext
    if not sent_files:
        text = 'Files: None'
    elif LICHER_CHAT and LICHER_STICKER and message.chat.id in ADMIN_CHATS:
        # Celebration sticker for admin-chat uploads, when configured.
        await client.send_sticker(LICHER_CHAT, LICHER_STICKER)
    thing = await message.reply_text(text, quote=quote,
                                     disable_web_page_preview=True)
    if first_index is None:
        first_index = thing
    asyncio.create_task(
        reply.edit_text(
            f'Download successful, files uploaded.\nFiles: {first_index.link}',
            disable_web_page_preview=True))
async def generate_media(anilist):
    """Build the HTML caption and cover-image URL for an AniList media entry.

    Returns a ``(text, image_url)`` tuple; when a description is present it
    is truncated so the parsed caption stays within Telegram's
    1024-character caption limit.
    """
    title_romaji = anilist['title']['romaji']
    title_english = anilist['title']['english']
    title_native = anilist['title']['native']
    # Renamed from 'type'/'format' so the builtins aren't shadowed.
    if anilist['format'] == 'NOVEL':
        media_type = 'Light Novel'
    else:
        media_type = anilist['type'].capitalize()
    media_format = anilist['format']
    media_format = FORMAT_NAMES.get(media_format, media_format)
    status = (anilist['status'] or 'Unknown').replace('_', ' ').title()
    description = (anilist.get('description') or '').strip()
    episodes = anilist['episodes']
    duration = anilist['duration']
    chapters = anilist['chapters']
    volumes = anilist['volumes']
    genres = ', '.join(anilist['genres'])
    synonyms = ', '.join(anilist['synonyms'])
    average_score = anilist['averageScore']
    site_url = anilist['siteUrl']
    # Drill down to the next scheduled airing node, if any.
    next_airing_episode = anilist['airingSchedule']
    if next_airing_episode:
        next_airing_episode = next_airing_episode['nodes']
        if next_airing_episode:
            next_airing_episode = next_airing_episode[0]
    text = f'<a href="{site_url}">{title_romaji}</a>'
    if title_english:
        text += f' ({title_english})'
    if title_native:
        text += f' ({title_native})'
    if synonyms:
        text += f'\n<b>Synonyms:</b> {synonyms}'
    if genres:
        text += f'\n<b>Genres:</b> {genres}'
    text += f'\n<b>Type:</b> {media_type}\n'
    if anilist['type'] != 'MANGA':
        text += f'<b>Format:</b> {media_format}\n'
    text += f'<b>Status:</b> {status}\n'
    if next_airing_episode:
        # NOTE(review): fromtimestamp renders in the host's local timezone.
        airing_at = str(
            datetime.datetime.fromtimestamp(next_airing_episode['airingAt']))
        time_until_airing = str(
            datetime.timedelta(seconds=next_airing_episode['timeUntilAiring']))
        text += f'<b>Airing At:</b> {airing_at}\n<b>Airing In:</b> {time_until_airing}\n'
    if average_score is not None:
        text += f'<b>Average Score:</b> {average_score}%\n'
    if (episodes is not None
            or next_airing_episode) and anilist['format'] != 'MOVIE':
        text += f'<b>Episodes:</b> '
        if next_airing_episode:
            # Aired count = next episode number - 1.
            text += f'{next_airing_episode["episode"] - 1}/'
        text += f'{"???" if episodes is None else episodes}\n'
    if duration:
        text += f'<b>Duration:</b> {duration} minutes{" per episode" if anilist["format"] != "MOVIE" else ""}\n'
    if chapters:
        text += f'<b>Chapters:</b> {chapters}\n'
    if volumes:
        text += f'<b>Volumes:</b> {volumes}\n'
    if description:
        text += '<b>Description:</b>\n'
        # Trim so the total parsed caption cannot exceed 1024 characters.
        parser = pyrogram_html.HTML(None)
        total_length = len((await parser.parse(text))['message'])
        if len(description) > 1023 - total_length:
            description = description[:1022 - total_length] + '…'
        text += description
    return text, f"https://img.anili.st/media/{anilist['id']}"