async def gen_fake_details(client, message):
    """Edit the status message with a freshly generated fake identity.

    Generates a random name, address, private IP, credit card, e-mail,
    job title and two user-agent strings via Faker, then renders them
    as one HTML-formatted message.
    """
    lel = await edit_or_reply(message, "`Processing...`")
    fake = Faker()
    name = str(fake.name())
    # The internet provider supplies ipv4_private / ascii_free_email etc.
    fake.add_provider(internet)
    fields = (
        ("Name :-", name),
        ("Address:-", str(fake.address())),
        ("IP ADDRESS:-", fake.ipv4_private()),
        ("credit card:-", fake.credit_card_full()),
        ("Email Id:-", fake.ascii_free_email()),
        ("Job:-", fake.job()),
        ("android user agent:-", fake.android_platform_token()),
        ("Pc user agent:-", fake.chrome()),
    )
    body = "\n\n".join(f"<b>{label}</b><code>{value}</code>" for label, value in fields)
    await lel.edit(
        "<b><u> Fake Information Generated</b></u>\n" + body,
        parse_mode="HTML",
    )
async def hi(event):
    """Telethon handler: replace the triggering message with a fake identity.

    Ignores forwarded messages; otherwise edits the event message with
    Faker-generated personal details rendered as HTML.
    """
    if event.fwd_from:
        return
    fake = Faker()
    print("FAKE DETAILS GENERATED\n")
    name = str(fake.name())
    # internet provider backs the ip / email / user-agent generators.
    fake.add_provider(internet)
    fields = (
        ("Name :-", name),
        ("Address:-", str(fake.address())),
        ("IP ADDRESS:-", fake.ipv4_private()),
        ("credit card:-", fake.credit_card_full()),
        ("Email Id:-", fake.ascii_free_email()),
        ("Job:-", fake.job()),
        ("android user agent:-", fake.android_platform_token()),
        ("Pc user agent:-", fake.chrome()),
    )
    body = "\n\n".join(f"<b>{label}</b><code>{value}</code>" for label, value in fields)
    await event.edit(
        "<b><u> Fake Information Generated</b></u>\n" + body,
        parse_mode="HTML",
    )
async def hi(event):
    """Telethon handler: edit the message with a fake identity (Indonesian labels).

    Skips forwarded messages, then emits Faker-generated details using
    Indonesian field captions with emoji, formatted as HTML.
    """
    if event.fwd_from:
        return
    fake = Faker()
    print("FAKE DETAILS GENERATED\n")
    name = str(fake.name())
    # internet provider backs the ip / email / user-agent generators.
    fake.add_provider(internet)
    fields = (
        ("👥 Nama : ", name),
        ("🏘️ Alamat : ", str(fake.address())),
        ("🤖 Alamat IP : ", fake.ipv4_private()),
        ("🏧 Kartu Kredit : ", fake.credit_card_full()),
        ("🔗 Alamat Email : ", fake.ascii_free_email()),
        ("👨🏻🔧 Pekerjaan : ", fake.job()),
        ("📱 Android User-Agent : ", fake.android_platform_token()),
        ("🖥️ PC User-Agent : ", fake.chrome()),
    )
    body = "\n\n".join(f"<b>{label}</b><code>{value}</code>" for label, value in fields)
    await event.edit(
        "<b><u> Fake Information Generated</b></u>\n" + body,
        parse_mode="HTML",
    )
def fakeid(update: Update, context: CallbackContext):
    """python-telegram-bot handler: reply with a Faker-generated identity.

    Posts a temporary "generating..." notice, sends the generated details
    as an HTML reply, then deletes the notice.
    """
    message = update.effective_message
    dltmsg = message.reply_text("generating fake identity for you...")
    fake = Faker()
    print("FAKE DETAILS GENERATED\n")
    name = str(fake.name())
    # internet provider backs the ip / email / user-agent generators.
    fake.add_provider(internet)
    fields = (
        ("Name :-", name),
        ("Address:-", str(fake.address())),
        ("IP ADDRESS:-", fake.ipv4_private()),
        ("credit card:-", fake.credit_card_full()),
        ("Email Id:-", fake.ascii_free_email()),
        ("Job:-", fake.job()),
        ("android user agent:-", fake.android_platform_token()),
        ("Pc user agent:-", fake.chrome()),
    )
    body = "\n\n".join(f"<b>{label}</b><code>{value}</code>" for label, value in fields)
    message.reply_text(
        "<b> Fake Information Generated</b>\n" + body,
        parse_mode=ParseMode.HTML,
    )
    dltmsg.delete()
async def fakegen(event):
    """Telethon handler: reply with a fake identity, admin-gated in groups.

    Forwarded messages are ignored. In groups the sender must be an admin;
    otherwise an error reply is sent and nothing is generated.
    """
    if event.fwd_from:
        return
    # In groups, only admins may trigger the generator.
    if event.is_group and not await is_user_admin(event, event.message.sender_id):
        await event.reply("`You Should Be Admin To Do This!`")
        return
    fake = Faker()
    print("FAKE DETAILS GENERATED\n")
    name = str(fake.name())
    # internet provider backs the ip / email / user-agent generators.
    fake.add_provider(internet)
    fields = (
        ("Name :-", name),
        ("Address:-", str(fake.address())),
        ("IP ADDRESS:-", fake.ipv4_private()),
        ("credit card:-", fake.credit_card_full()),
        ("Email Id:-", fake.ascii_free_email()),
        ("Job:-", fake.job()),
        ("android user agent:-", fake.android_platform_token()),
        ("Pc user agent:-", fake.chrome()),
    )
    body = "\n\n".join(f"<b>{label}</b><code>{value}</code>" for label, value in fields)
    await event.reply(
        "<b><u> Fake Information Generated</b></u>\n" + body,
        parse_mode="HTML",
    )
date_time_between():用法同dates future_date():未来日期 future_datetime():未来时间 month():随机月份 month_name():随机月份(英文) past_date():随机生成已经过去的日期 past_datetime():随机生成已经过去的时间 time():随机24小时时间 timedelta():随机获取时间差 time_object():随机24小时时间,time对象 time_series():随机TimeSeries对象 timezone():随机时区 unix_time():随机Unix时间 year():随机年份 9、python 相关方法 profile():随机生成档案信息 simple_profile():随机生成简单档案信息 pyiterable() pylist() pyset() pystruct() pytuple() pydict() ''' fake = Faker(locale='zh_CN') print(fake.name()) print(fake.country()) print(fake.chrome()) print(fake.profile()) print(fake.android_platform_token()) print(dir(fake))
class AutoGen(commands.Cog):
    """discord.py Cog exposing commands that return Faker-generated test data.

    Every command funnels through :meth:`send`, which wraps the generated
    value in an embed carrying a "randomly generated" disclaimer. A
    background task re-seeds Faker every 60 seconds.

    Fixes vs. the previous revision:
    - ``cog_unload`` now *calls* ``is_running()``; the old truthiness test on
      the bound method was always True, so ``stop()`` was called even when
      the loop was not running.
    - ``autouseragent`` stores generator *callables* and invokes only the
      selected one, instead of eagerly generating every user-agent variant
      on each invocation.
    - Fixed the "Retrusn" typo in ``autojob``'s description.
    """

    def __init__(self, bot: commands.AutoShardedBot):
        self.bot = bot
        self.fake = Faker()
        self.set_faker_seed.start()

    async def send(self, ctx, name: str, func: str, color: discord.Color = None) -> discord.Message:
        """Send *func* (an already-generated value) as an embed titled *name*."""
        embed = discord.Embed(title=name.title(),
                              description=f"```{func}```",
                              color=color or discord.Color.blue())
        embed.set_footer(text=f"Disclaimer: The data provided is randomly generated using Faker "
                              "(https://pypi.org/project/Faker/). Any resemblance to real values in any "
                              "way is purely coincidental. This functionality is intended to be used for generating "
                              "values for testing purposes.")
        return await ctx.channel.send(embed=embed)

    @tasks.loop(seconds=60)
    async def set_faker_seed(self):
        # Periodic re-seed so generated values do not follow one fixed stream.
        Faker.seed(randint(0, 100000))

    def cog_unload(self):
        # BUG FIX: is_running is a method; without the call the bound method
        # was always truthy and the guard never did anything.
        if self.set_faker_seed.is_running():
            self.set_faker_seed.stop()

    @commands.command(aliases=['aaddress'],
                      desc="Returns an autogenerated address",
                      usage="autoaddress")
    async def autoaddress(self, ctx):
        return await self.send(ctx, "address", self.fake.address())

    @commands.command(aliases=['acity'],
                      desc="Returns a autogenerated city name",
                      usage="autocity")
    async def autocity(self, ctx):
        return await self.send(ctx, "city", self.fake.city())

    @commands.command(aliases=['acountry'],
                      desc="Returns an autogenerated country name",
                      usage="autocountry")
    async def autocountry(self, ctx):
        return await self.send(ctx, "country", self.fake.country())

    @commands.command(aliases=['astreet'],
                      desc="Returns an autogenerated street name",
                      usage="autostreet")
    async def autostreet(self, ctx):
        return await self.send(ctx, "street", self.fake.street_name())

    @commands.command(aliases=['alp', 'alicense'],
                      desc="Returns an autogenerated license plate",
                      usage="autolicenseplace")
    async def autolicenseplate(self, ctx):
        return await self.send(ctx, "license plate", self.fake.license_plate())

    @commands.command(aliases=['acolor'],
                      desc="Returns an autogenerated color value",
                      usage="autocolor (format) (hue) (luminosity)",
                      note='`(format)` can be "hex", "hsv", "hsl", or "rgb", defaults to "hex"'
                           '`(hue)` can be "red", "orange", "yellow", "green", "blue", "purple", "pink", '
                           'or can be unspecified for a random hue.\n\n'
                           '`(luminosity)` can be "bright", "dark", "light", or unspecified for a random luminosity')
    async def autocolor(self, ctx, *, options: str = None):
        # Pull recognized tokens out of the free-form option string; anything
        # not matched falls back to the documented defaults.
        options = options.split(" ") if options else []
        color_format = extract(options, COLOR_FORMATS, func="lower", default="hex")
        hue = extract(options, HUES, func="lower")
        luminosity = extract(options, LUMINOSITIES, func="lower", default="random")
        return await self.send(
            ctx,
            f"{color_format} color",
            self.fake.color(
                hue=hue, luminosity=luminosity, color_format=color_format
            )
        )

    @commands.command(aliases=['acn', 'acolorname'],
                      desc="Returns an autogenerated color name",
                      usage="autocolorname")
    async def autocolorname(self, ctx):
        return await self.send(ctx, "color name", self.fake.color_name())

    @commands.command(aliases=['abs'],
                      desc="Returns an autogenerated nonsense string",
                      usage="fnonsense")
    async def autononsense(self, ctx):
        return await self.send(ctx, "nonsense", self.fake.bs())

    @commands.command(aliases=['acp', 'acatch'],
                      desc="Returns an autogenerated catch-phrase",
                      usage="autocatchphrase")
    async def autocatchphrase(self, ctx):
        return await self.send(ctx, "catch phrase", self.fake.catch_phrase())

    @commands.command(aliases=['acompany'],
                      desc="Returns an autogenerated company name",
                      usage="autocompany")
    async def autocompany(self, ctx):
        return await self.send(ctx, "company", self.fake.company())

    @commands.command(aliases=['adate'],
                      desc="Returns an autogenerated date",
                      usage="autodate")
    async def autodate(self, ctx):
        return await self.send(ctx, "date", self.fake.date(pattern="%d/%m/%Y"))

    @commands.command(aliases=['atime'],
                      desc="Returns a random time",
                      usage="autotime")
    async def autotime(self, ctx):
        return await self.send(ctx, "time", self.fake.time())

    @commands.command(aliases=['aday'],
                      desc="Returns an autogenerated day of the month",
                      usage="autoday")
    async def autoday(self, ctx):
        return await self.send(ctx, "day", self.fake.day_of_month())

    @commands.command(aliases=['aweek'],
                      desc="Returns an autogenerated day of the week",
                      usage="autoweek")
    async def autoweek(self, ctx):
        return await self.send(ctx, "day of the week", self.fake.day_of_week())

    @commands.command(aliases=['amonth'],
                      desc="Returns an autogenerated month",
                      usage="automonth")
    async def automonth(self, ctx):
        return await self.send(ctx, "month", self.fake.month_name())

    @commands.command(aliases=['ayear'],
                      desc="Returns an autogenerated year",
                      usage="autoyear")
    async def autoyear(self, ctx):
        return await self.send(ctx, "year", self.fake.year())

    @commands.command(aliases=['aemail'],
                      desc="Returns an autogenerated email address",
                      usage="autoemail (domain)",
                      note="`(domain)` can be a domain ending with an extension, "
                           "if unspecified, defaults to a random domain")
    async def autoemail(self, ctx, *, domain: str = None):
        return await self.send(ctx, "email address", self.fake.email(domain=domain))

    @commands.command(aliases=['aip'],
                      desc="Returns an autogenerated IP address",
                      usage="autoip (version) (class)",
                      note='`(version)` can be either 4 or 6, defaults to 4 if unspecified. '
                           '`(class)` can be either "a", "b", or "c", defaults to random')
    async def autoip(self, ctx, *, options: str = None):
        options = options.split(" ") if options else []
        version = extract(options, "46")
        addr_class = extract(options, "abc", func="lower")
        # Anything other than an explicit "6" produces an IPv4 address.
        if not version or version != "6":
            func = self.fake.ipv4(address_class=addr_class)
        else:
            func = self.fake.ipv6()
        return await self.send(ctx, "IP address", func)

    @commands.command(aliases=['amac'],
                      desc="Returns an autogenerated MAC address",
                      usage="automac")
    async def automac(self, ctx):
        return await self.send(ctx, "MAC address", self.fake.mac_address())

    @commands.command(aliases=['aport'],
                      desc="Returns an autogenerated port number",
                      usage="autoport")
    async def autoport(self, ctx):
        return await self.send(ctx, "port", self.fake.port_number())

    @commands.command(aliases=['aisbn'],
                      desc="Returns an autogenerated ISBN",
                      usage="autoisbn (version)",
                      note='`(version)` can be either "10" or "13", defaults to 13 if unspecified')
    async def autoisbn(self, ctx, version: int = None):
        if not version or version == 13:
            func = self.fake.isbn13()
        else:
            func = self.fake.isbn10()
        return await self.send(ctx, "ISBN", func)

    @commands.command(aliases=['ajob'],
                      desc="Returns an autogenerated job",
                      usage="autojob")
    async def autojob(self, ctx):
        return await self.send(ctx, "job", self.fake.job())

    @commands.command(aliases=['afn', 'afirstname'],
                      desc="Returns an autogenerated first name",
                      usage="autofirstname")
    async def autofirstname(self, ctx):
        return await self.send(ctx, "first name", self.fake.first_name())

    @commands.command(aliases=['aln', 'alastname'],
                      desc="Returns an autogenerated last name",
                      usage="autolastname")
    async def autolastname(self, ctx):
        return await self.send(ctx, "last name", self.fake.last_name())

    @commands.command(aliases=['an'],
                      desc="Returns an autogenerated full name",
                      usage="autoname")
    async def autoname(self, ctx):
        return await self.send(ctx, "name", self.fake.name())

    @commands.command(aliases=['auagent', 'auseragent', 'ausera'],
                      desc="Returns an autogenerated user agent",
                      usage="autouseragent (platform)",
                      note='`(platform)` can be "android", "chrome", "firefox", "internet explorer" or "ie", '
                           '"ios", "linux", "mac", "opera", "safari", "windows", defaults to random')
    async def autouseragent(self, ctx, platform: str = "random"):
        # BUG FIX: map to the bound generator methods (not their results) so
        # only the requested generator runs, instead of all twelve per call.
        funcs = {
            "android": self.fake.android_platform_token,
            "chrome": self.fake.chrome,
            "firefox": self.fake.firefox,
            "internet explorer": self.fake.internet_explorer,
            "ie": self.fake.internet_explorer,
            "ios": self.fake.ios_platform_token,
            "linux": self.fake.linux_platform_token,
            "mac": self.fake.mac_platform_token,
            "opera": self.fake.opera,
            "safari": self.fake.safari,
            "windows": self.fake.windows_platform_token,
            "random": self.fake.user_agent,
        }
        func = funcs.get(platform.lower(), funcs['random'])
        return await self.send(ctx, "user agent", func())
async def inline_query_handler(client, query): try: text = query.query.lower() answers = [] if text.strip() == "": answerss = await inline_help_func(__HELP__) await client.answer_inline_query(query.id, results=answerss, cache_time=10) return elif text.split()[0] == "alive": answerss = await alive_function(answers) await client.answer_inline_query(query.id, results=answerss, cache_time=10) elif text.split()[0] == "tr": lang = text.split()[1] tex = text.split(None, 2)[2] answerss = await translate_func(answers, lang, tex) await client.answer_inline_query(query.id, results=answerss, cache_time=10) elif text.split()[0] == "ud": tex = text.split(None, 1)[1] answerss = await urban_func(answers, tex) await client.answer_inline_query(query.id, results=answerss, cache_time=10) elif text.split()[0] == "google": tex = text.split(None, 1)[1] answerss = await google_search_func(answers, tex) await client.answer_inline_query(query.id, results=answerss, cache_time=10) elif text.split()[0] == "webss": tex = text.split(None, 1)[1] answerss = await webss(tex) await client.answer_inline_query(query.id, results=answerss, cache_time=2) elif text.split()[0] == "bitly": tex = text.split(None, 1)[1] answerss = await shortify(tex) await client.answer_inline_query(query.id, results=answerss, cache_time=2) elif text.split()[0] == "wiki": if len(text.split()) < 2: await client.answer_inline_query( query.id, results=answers, switch_pm_text="Wikipedia | wiki [QUERY]", switch_pm_parameter="inline", ) return tex = text.split(None, 1)[1].strip() answerss = await wiki_func(answers, tex) await client.answer_inline_query(query.id, results=answerss, cache_time=2) elif text.split()[0] == "ping": answerss = await ping_func(answers) await client.answer_inline_query(query.id, results=answerss, cache_time=2) return elif text.split()[0] == "yt": answers = [] search_query = text.split(None, 1)[1] search_query = query.query.lower().strip().rstrip() if search_query == "": await client.answer_inline_query( 
query.id, results=answers, switch_pm_text="Type a YouTube video name...", switch_pm_parameter="help", cache_time=0, ) else: search = VideosSearch(search_query, limit=50) for result in search.result()["result"]: answers.append( InlineQueryResultArticle( title=result["title"], description="{}, {} views.".format( result["duration"], result["viewCount"]["short"] ), input_message_content=InputTextMessageContent( "https://www.youtube.com/watch?v={}".format( result["id"] ) ), thumb_url=result["thumbnails"][0]["url"], ) ) try: await query.answer(results=answers, cache_time=0) except errors.QueryIdInvalid: await query.answer( results=answers, cache_time=0, switch_pm_text="Error: Search timed out", switch_pm_parameter="", ) elif text.split()[0] == "wall": tex = text.split(None, 1)[1] answerss = await wall_func(answers, tex) await client.answer_inline_query(query.id, results=answerss) elif text.split()[0] == "pic": tex = text.split(None, 1)[1] answerss = await wall_func(answers, tex) await client.answer_inline_query(query.id, results=answerss) elif text.split()[0] == "saavn": tex = text.split(None, 1)[1] answerss = await saavn_func(answers, tex) await client.answer_inline_query(query.id, results=answerss) elif text.split()[0] == "deezer": tex = text.split(None, 1)[1] answerss = await deezer_func(answers, tex) await client.answer_inline_query(query.id, results=answerss) elif text.split()[0] == "torrent": tex = text.split(None, 1)[1] answerss = await torrent_func(answers, tex) await client.answer_inline_query(query.id, results=answerss, cache_time=10) elif text.split()[0] == "modapk": sgname = text.split(None, 1)[1] PabloEscobar = ( f"https://an1.com/tags/MOD/?story={sgname}&do=search&subaction=search" ) r = requests.get(PabloEscobar) results = [] soup = BeautifulSoup(r.content, "html5lib") mydivs = soup.find_all("div", {"class": "search-results"}) Pop = soup.find_all("div", {"class": "title"}) cnte = len(mydivs) for cnt in range(cnte): sucker = mydivs[cnt] pH9 = 
sucker.find("a").contents[0] file_name = pH9 pH = sucker.findAll("img") imme = pH[0]["src"] Pablo = Pop[0].a["href"] ro = requests.get(Pablo) soupe = BeautifulSoup(ro.content, "html5lib") myopo = soupe.find_all("div", {"class": "item"}) capt = f"**{file_name}** \n** {myopo[0].text}**\n**{myopo[1].text}**\n**{myopo[2].text}**\n**{myopo[3].text}**" mydis0 = soupe.find_all("a", {"class": "get-product"}) Lol9 = mydis0[0] lemk = "https://an1.com" + Lol9["href"] rr = requests.get(lemk) soup = BeautifulSoup(rr.content, "html5lib") script = soup.find("script", type="text/javascript") leek = re.search(r'href=[\'"]?([^\'" >]+)', script.text).group() dl_link = leek[5:] results.append( InlineQueryResultPhoto( photo_url=imme, title=file_name, caption=capt, reply_markup=InlineKeyboardMarkup( [ [InlineKeyboardButton("Download Link", url=lemk)], [ InlineKeyboardButton( "Direct Download Link", url=dl_link ) ], ] ), ) ) await client.answer_inline_query(query.id, cache_time=0, results=results) elif text.split()[0] == "reddit": subreddit = text.split(None, 1)[1] results = [] reddit = await arq.reddit(subreddit) sreddit = reddit.subreddit title = reddit.title image = reddit.url link = reddit.postLink caption = f"""**Title:** `{title}` Subreddit: `{sreddit}`""" results.append( InlineQueryResultPhoto( photo_url=image, title="Meme Search", caption=caption, reply_markup=InlineKeyboardMarkup( [ [InlineKeyboardButton("PostLink", url=link)], ] ), ) ) await client.answer_inline_query(query.id, cache_time=0, results=results) elif text.split()[0] == "imdb": movie_name = text.split(None, 1)[1] results = [] remove_space = movie_name.split(" ") final_name = "+".join(remove_space) page = requests.get( "https://www.imdb.com/find?ref_=nv_sr_fn&q=" + final_name + "&s=all" ) str(page.status_code) soup = BeautifulSoup(page.content, "lxml") odds = soup.findAll("tr", "odd") mov_title = odds[0].findNext("td").findNext("td").text mov_link = ( "http://www.imdb.com/" + 
odds[0].findNext("td").findNext("td").a["href"] ) page1 = requests.get(mov_link) soup = BeautifulSoup(page1.content, "lxml") if soup.find("div", "poster"): poster = soup.find("div", "poster").img["src"] else: poster = "" if soup.find("div", "title_wrapper"): pg = soup.find("div", "title_wrapper").findNext("div").text mov_details = re.sub(r"\s+", " ", pg) else: mov_details = "" credits = soup.findAll("div", "credit_summary_item") if len(credits) == 1: director = credits[0].a.text writer = "Not available" stars = "Not available" elif len(credits) > 2: director = credits[0].a.text writer = credits[1].a.text actors = [] for x in credits[2].findAll("a"): actors.append(x.text) actors.pop() stars = actors[0] + "," + actors[1] + "," + actors[2] else: director = credits[0].a.text writer = "Not available" actors = [] for x in credits[1].findAll("a"): actors.append(x.text) actors.pop() stars = actors[0] + "," + actors[1] + "," + actors[2] if soup.find("div", "inline canwrap"): story_line = soup.find("div", "inline canwrap").findAll("p")[0].text else: story_line = "Not available" info = soup.findAll("div", "txt-block") if info: mov_country = [] mov_language = [] for node in info: a = node.findAll("a") for i in a: if "country_of_origin" in i["href"]: mov_country.append(i.text) elif "primary_language" in i["href"]: mov_language.append(i.text) if soup.findAll("div", "ratingValue"): for r in soup.findAll("div", "ratingValue"): mov_rating = r.strong["title"] else: mov_rating = "Not available" lol = f"Movie - {mov_title}\n Click to see more" msg = ( "<a href=" + poster + ">​</a>" "<b>Title : </b><code>" + mov_title + "</code>\n<code>" + mov_details + "</code>\n<b>Rating : </b><code>" + mov_rating + "</code>\n<b>Country : </b><code>" + mov_country[0] + "</code>\n<b>Language : </b><code>" + mov_language[0] + "</code>\n<b>Director : </b><code>" + director + "</code>\n<b>Writer : </b><code>" + writer + "</code>\n<b>Stars : </b><code>" + stars + "</code>\n<b>IMDB Url : </b>" + mov_link + 
"\n<b>Story Line : </b>" + story_line ) results.append( InlineQueryResultArticle( title="Imdb Search", description=lol, input_message_content=InputTextMessageContent( msg, disable_web_page_preview=False, parse_mode="HTML" ), ) ) await client.answer_inline_query(query.id, cache_time=0, results=results) elif text.split()[0] == "spaminfo": cmd = text.split(None, 1)[1] results = [] url = f"https://api.intellivoid.net/spamprotection/v1/lookup?query={cmd}" a = await AioHttp().get_json(url) response = a["success"] if response is True: date = a["results"]["last_updated"] stats = f"**◢ Intellivoid• SpamProtection Info**:\n" stats += f' • **Updated on**: `{datetime.fromtimestamp(date).strftime("%Y-%m-%d %I:%M:%S %p")}`\n' stats += f" • **Chat Info**: [Link](t.me/SpamProtectionBot/?start=00_{cmd})\n" if a["results"]["attributes"]["is_potential_spammer"] is True: stats += f" • **User**: `USERxSPAM`\n" elif a["results"]["attributes"]["is_operator"] is True: stats += f" • **User**: `USERxOPERATOR`\n" elif a["results"]["attributes"]["is_agent"] is True: stats += f" • **User**: `USERxAGENT`\n" elif a["results"]["attributes"]["is_whitelisted"] is True: stats += f" • **User**: `USERxWHITELISTED`\n" stats += f' • **Type**: `{a["results"]["entity_type"]}`\n' stats += f' • **Language**: `{a["results"]["language_prediction"]["language"]}`\n' stats += f' • **Language Probability**: `{a["results"]["language_prediction"]["probability"]}`\n' stats += f"**Spam Prediction**:\n" stats += f' • **Ham Prediction**: `{a["results"]["spam_prediction"]["ham_prediction"]}`\n' stats += f' • **Spam Prediction**: `{a["results"]["spam_prediction"]["spam_prediction"]}`\n' stats += f'**Blacklisted**: `{a["results"]["attributes"]["is_blacklisted"]}`\n' if a["results"]["attributes"]["is_blacklisted"] is True: stats += f' • **Reason**: `{a["results"]["attributes"]["blacklist_reason"]}`\n' stats += f' • **Flag**: `{a["results"]["attributes"]["blacklist_flag"]}`\n' stats += 
f'**PTID**:\n`{a["results"]["private_telegram_id"]}`\n' results.append( InlineQueryResultArticle( title="Spam Info", description="Search Users spam info", input_message_content=InputTextMessageContent( stats, disable_web_page_preview=True ), ) ) await client.answer_inline_query( query.id, cache_time=0, results=results ) elif text.split()[0] == "lyrics": cmd = text.split(None, 1)[1] results = [] song = "" song = Song.find_song(cmd) if song: if song.lyrics: reply = song.format() else: reply = "Couldn't find any lyrics for that song! try with artist name along with song if still doesnt work try `.glyrics`" else: reply = "lyrics not found! try with artist name along with song if still doesnt work try `.glyrics`" if len(reply) > 4095: reply = "lyrics too big, Try using /lyrics" results.append( InlineQueryResultArticle( title="Song Lyrics", description="Click here to see lyrics", input_message_content=InputTextMessageContent( reply, disable_web_page_preview=False ), ) ) await client.answer_inline_query(query.id, cache_time=0, results=results) elif text.split()[0] == "pokedex": if len(text.split()) < 2: await client.answer_inline_query( query.id, results=answers, switch_pm_text="Pokemon [text]", switch_pm_parameter="pokedex", ) return pokedex = text.split(None, 1)[1].strip() Pokedex = await pokedexinfo(answers, pokedex) await client.answer_inline_query(query.id, results=Pokedex, cache_time=2) elif text.split()[0] == "paste": tex = text.split(None, 1)[1] answerss = await paste_func(answers, tex) await client.answer_inline_query(query.id, results=answerss, cache_time=2) elif text.split()[0] == "covid": lel = text.split(None, 1)[1] results = [] country = lel.replace(" ", "") data = await fetch(f"https://corona.lmao.ninja/v2/countries/{country}") data = await json_prettify(data) results.append( InlineQueryResultArticle( title="Covid Info Gathered succesfully", description=data, input_message_content=InputTextMessageContent( data, disable_web_page_preview=False ), ) ) await 
client.answer_inline_query(query.id, results=results, cache_time=2) elif text.split()[0] == "country": lel = text.split(None, 1)[1] results = [] country = CountryInfo(lel) try: a = country.info() except: a = "Country Not Avaiable Currently" name = a.get("name") bb = a.get("altSpellings") hu = "" for p in bb: hu += p + ", " area = a.get("area") borders = "" hell = a.get("borders") for fk in hell: borders += fk + ", " call = "" WhAt = a.get("callingCodes") for what in WhAt: call += what + " " capital = a.get("capital") currencies = "" fker = a.get("currencies") for FKer in fker: currencies += FKer + ", " HmM = a.get("demonym") geo = a.get("geoJSON") pablo = geo.get("features") Pablo = pablo[0] PAblo = Pablo.get("geometry") EsCoBaR = PAblo.get("type") iso = "" iSo = a.get("ISO") for hitler in iSo: po = iSo.get(hitler) iso += po + ", " fla = iSo.get("alpha2") fla.upper() languages = a.get("languages") lMAO = "" for lmao in languages: lMAO += lmao + ", " nonive = a.get("nativeName") waste = a.get("population") reg = a.get("region") sub = a.get("subregion") tik = a.get("timezones") tom = "" for jerry in tik: tom += jerry + ", " GOT = a.get("tld") lanester = "" for targaryen in GOT: lanester += targaryen + ", " wiki = a.get("wiki") caption = f"""<b><u>Information Gathered Successfully</b></u> <b> Country Name:- {name} Alternative Spellings:- {hu} Country Area:- {area} square kilometers Borders:- {borders} Calling Codes:- {call} Country's Capital:- {capital} Country's currency:- {currencies} Demonym:- {HmM} Country Type:- {EsCoBaR} ISO Names:- {iso} Languages:- {lMAO} Native Name:- {nonive} population:- {waste} Region:- {reg} Sub Region:- {sub} Time Zones:- {tom} Top Level Domain:- {lanester} wikipedia:- {wiki}</b> Gathered By Daisy X.</b> """ results.append( InlineQueryResultArticle( title=f"Infomation of {name}", description=f""" Country Name:- {name} Alternative Spellings:- {hu} Country Area:- {area} square kilometers Borders:- {borders} Calling Codes:- {call} Country's 
Capital:- {capital} Touch for more info """, input_message_content=InputTextMessageContent( caption, parse_mode="HTML", disable_web_page_preview=True ), ) ) await client.answer_inline_query(query.id, results=results, cache_time=2) elif text.split()[0] == "fakegen": results = [] fake = Faker() name = str(fake.name()) fake.add_provider(internet) address = str(fake.address()) ip = fake.ipv4_private() cc = fake.credit_card_full() email = fake.ascii_free_email() job = fake.job() android = fake.android_platform_token() pc = fake.chrome() res = f"<b><u> Fake Information Generated</b></u>\n<b>Name :-</b><code>{name}</code>\n\n<b>Address:-</b><code>{address}</code>\n\n<b>IP ADDRESS:-</b><code>{ip}</code>\n\n<b>credit card:-</b><code>{cc}</code>\n\n<b>Email Id:-</b><code>{email}</code>\n\n<b>Job:-</b><code>{job}</code>\n\n<b>android user agent:-</b><code>{android}</code>\n\n<b>Pc user agent:-</b><code>{pc}</code>" results.append( InlineQueryResultArticle( title="Fake infomation gathered", description="Click here to see them", input_message_content=InputTextMessageContent( res, parse_mode="HTML", disable_web_page_preview=True ), ) ) await client.answer_inline_query(query.id, cache_time=0, results=results) elif text.split()[0] == "cs": results = [] score_page = "http://static.cricinfo.com/rss/livescores.xml" page = urllib.request.urlopen(score_page) soup = BeautifulSoup(page, "html.parser") result = soup.find_all("description") Sed = "" for match in result: Sed += match.get_text() + "\n\n" res = f"<b><u>Match information gathered successful</b></u>\n\n\n<code>{Sed}</code>" results.append( InlineQueryResultArticle( title="Match information gathered", description="Click here to see them", input_message_content=InputTextMessageContent( res, parse_mode="HTML", disable_web_page_preview=False ), ) ) await client.answer_inline_query(query.id, cache_time=0, results=results) elif text.split()[0] == "antonyms": results = [] lel = text.split(None, 1)[1] word = f"{lel}" let = 
dictionary.antonym(word) set = str(let) jet = set.replace("{", "") net = jet.replace("}", "") got = net.replace("'", "") results.append( InlineQueryResultArticle( title=f"antonyms for {lel}", description=got, input_message_content=InputTextMessageContent( got, disable_web_page_preview=False ), ) ) await client.answer_inline_query(query.id, cache_time=0, results=results) elif text.split()[0] == "synonyms": results = [] lel = text.split(None, 1)[1] word = f"{lel}" let = dictionary.synonym(word) set = str(let) jet = set.replace("{", "") net = jet.replace("}", "") got = net.replace("'", "") results.append( InlineQueryResultArticle( title=f"antonyms for {lel}", description=got, input_message_content=InputTextMessageContent( got, disable_web_page_preview=False ), ) ) await client.answer_inline_query(query.id, cache_time=0, results=results) elif text.split()[0] == "define": results = [] lel = text.split(None, 1)[1] word = f"{lel}" let = dictionary.meaning(word) set = str(let) jet = set.replace("{", "") net = jet.replace("}", "") got = net.replace("'", "") results.append( InlineQueryResultArticle( title=f"Definition for {lel}", description=got, input_message_content=InputTextMessageContent( got, disable_web_page_preview=False ), ) ) await client.answer_inline_query(query.id, cache_time=0, results=results) elif text.split()[0] == "weather": results = [] sample_url = "https://api.openweathermap.org/data/2.5/weather?q={}&APPID={}&units=metric" input_str = text.split(None, 1)[1] async with aiohttp.ClientSession() as session: response_api_zero = await session.get( sample_url.format(input_str, OPENWEATHERMAP_ID) ) response_api = await response_api_zero.json() if response_api["cod"] == 200: country_code = response_api["sys"]["country"] country_time_zone = int(response_api["timezone"]) sun_rise_time = int(response_api["sys"]["sunrise"]) + country_time_zone sun_set_time = int(response_api["sys"]["sunset"]) + country_time_zone lol = """ WEATHER INFO GATHERED Location: {} Temperature 
☀️: {}°С minimium: {}°С maximum : {}°С Humidity 🌤**: {}% Wind 💨: {}m/s Clouds ☁️: {}hpa Sunrise 🌤: {} {} Sunset 🌝: {} {}""".format( input_str, response_api["main"]["temp"], response_api["main"]["temp_min"], response_api["main"]["temp_max"], response_api["main"]["humidity"], response_api["wind"]["speed"], response_api["clouds"]["all"], # response_api["main"]["pressure"], time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(sun_rise_time)), country_code, time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(sun_set_time)), country_code, ) results.append( InlineQueryResultArticle( title=f"Weather Information", description=lol, input_message_content=InputTextMessageContent( lol, disable_web_page_preview=True ), ) ) await client.answer_inline_query( query.id, cache_time=0, results=results ) elif text.split()[0] == "datetime": results = [] gay = text.split(None, 1)[1] lel = gay query_timezone = lel.lower() if len(query_timezone) == 2: result = generate_time(query_timezone, ["countryCode"]) else: result = generate_time(query_timezone, ["zoneName", "countryName"]) if not result: result = f"Timezone info not available for <b>{lel}</b>" results.append( InlineQueryResultArticle( title=f"Date & Time info of {lel}", description=result, input_message_content=InputTextMessageContent( result, disable_web_page_preview=False, parse_mode="html" ), ) ) await client.answer_inline_query(query.id, cache_time=0, results=results) elif text.split()[0] == "app": rip = [] app_name = text.split(None, 1)[1] remove_space = app_name.split(" ") final_name = "+".join(remove_space) page = requests.get( "https://play.google.com/store/search?q=" + final_name + "&c=apps" ) str(page.status_code) soup = BeautifulSoup(page.content, "lxml", from_encoding="utf-8") results = soup.findAll("div", "ZmHEEd") app_name = ( results[0] .findNext("div", "Vpfmgd") .findNext("div", "WsMG1c nnK0zc") .text ) app_dev = ( results[0].findNext("div", "Vpfmgd").findNext("div", "KoLSrc").text ) app_dev_link = ( "https://play.google.com" + 
results[0].findNext("div", "Vpfmgd").findNext("a", "mnKHRc")["href"] ) app_rating = ( results[0] .findNext("div", "Vpfmgd") .findNext("div", "pf5lIe") .find("div")["aria-label"] ) app_link = ( "https://play.google.com" + results[0] .findNext("div", "Vpfmgd") .findNext("div", "vU6FJ p63iDd") .a["href"] ) app_icon = ( results[0] .findNext("div", "Vpfmgd") .findNext("div", "uzcko") .img["data-src"] ) app_details = "<a href='" + app_icon + "'>📲​</a>" app_details += " <b>" + app_name + "</b>" app_details += ( "\n\n<code>Developer :</code> <a href='" + app_dev_link + "'>" + app_dev + "</a>" ) app_details += "\n<code>Rating :</code> " + app_rating.replace( "Rated ", "⭐ " ).replace(" out of ", "/").replace(" stars", "", 1).replace( " stars", "⭐ " ).replace( "five", "5" ) app_details += ( "\n<code>Features :</code> <a href='" + app_link + "'>View in Play Store</a>" ) app_details += "\n\n===> @DaisySupport_Official <===" rip.append( InlineQueryResultArticle( title=f"Datails of {app_name}", description=app_details, input_message_content=InputTextMessageContent( app_details, disable_web_page_preview=True, parse_mode="html" ), ) ) await client.answer_inline_query(query.id, cache_time=0, results=rip) elif text.split()[0] == "gh": results = [] gett = text.split(None, 1)[1] text = gett + ' "site:github.com"' gresults = await GoogleSearch().async_search(text, 1) result = "" for i in range(4): try: title = gresults["titles"][i].replace("\n", " ") source = gresults["links"][i] description = gresults["descriptions"][i] result += f"[{title}]({source})\n" result += f"`{description}`\n\n" except IndexError: pass results.append( InlineQueryResultArticle( title=f"Results for {gett}", description=f" Github info of {title}\n Touch to read", input_message_content=InputTextMessageContent( result, disable_web_page_preview=True ), ) ) await client.answer_inline_query(query.id, cache_time=0, results=results) elif text.split()[0] == "so": results = [] gett = text.split(None, 1)[1] text = gett + ' 
"site:stackoverflow.com"' gresults = await GoogleSearch().async_search(text, 1) result = "" for i in range(4): try: title = gresults["titles"][i].replace("\n", " ") source = gresults["links"][i] description = gresults["descriptions"][i] result += f"[{title}]({source})\n" result += f"`{description}`\n\n" except IndexError: pass results.append( InlineQueryResultArticle( title=f"Stack overflow saerch - {title}", description=f" Touch to view search results on {title}", input_message_content=InputTextMessageContent( result, disable_web_page_preview=True ), ) ) await client.answer_inline_query(query.id, cache_time=0, results=results) except (IndexError, TypeError, KeyError, ValueError): return
# Demo dump of assorted Faker providers: prints one sample value per line,
# formatted as "<label>:\t <value>".  Relies on a `fake = Faker()` instance
# created earlier in this file.
#
# NOTE(review): `fake.add_provider("pl_PL")` passes a locale string where
# Faker's add_provider() expects a provider class/instance — this likely does
# not register the pl_PL locale as intended; confirm against the Faker docs.
print("Provider:\t ", fake.add_provider("pl_PL"))
print("Date this decade:\t ", fake.date_this_decade())
print("IPv4_public:\t ", fake.ipv4_public())
print("Phone number:\t ", fake.phone_number())  # was "Phone nymber" (typo)
print("Secondary address:\t ", fake.secondary_address())
print("Address:\t ", fake.address())
print("Date this month:\t ", fake.date_this_month())
print("IPv6:\t ", fake.ipv6())
print("Postal code:\t ", fake.postalcode())
# NOTE(review): instance-level `fake.seed()` was removed in Faker >= 4
# (raises TypeError; the classmethod `Faker.seed()` replaced it) — verify
# the pinned Faker version before relying on these two lines.
print("Seed:\t ", fake.seed())
print("AM/PM:\t ", fake.am_pm())
print("This year:\t ", fake.date_this_year())
print("ISBN10:\t ", fake.isbn10())
print("Postal code in state:\t ", fake.postalcode_in_state())
print("Instance:\t ", fake.seed_instance())
print("Android platform token:\t ", fake.android_platform_token())
print("Date time:\t ", fake.date_time())  # was "Data time" (typo)
print("ISBN13:\t ", fake.isbn13())  # was mislabeled "ISBN3"
print("Postal code plus 4:\t ", fake.postalcode_plus4())
print("Sentence:\t ", fake.sentence())
print("Email:\t ", fake.ascii_company_email())
print("Date time AD:\t ", fake.date_time_ad())  # anno-domini datetime; was "Date time add"
print("ISO8601:\t ", fake.iso8601())
print("Postcode:\t ", fake.postcode())
print("Sentences:\t ", fake.sentences())  # plural provider; was mislabeled "Sentence"
print("ASCII email:\t ", fake.ascii_email())
print("Date time between:\t ", fake.date_time_between())  # was "beween" (typo)
print("ITIN:\t ", fake.itin())
print("Postcode in state:\t ", fake.postcode_in_state())
# print("Set formatter:\t ", fake.set_formatter())
print("ASCII free email:\t ", fake.ascii_free_email())