def write_time(self, kwargs, name):
    # Resolve the sender's QQ ID from either a group or a friend context.
    if Group in kwargs:
        id = kwargs[Member].id
    if Friend in kwargs:
        id = kwargs[Friend].id
    # Use parameterized queries instead of f-string interpolation.
    a = self.c.execute(
        "SELECT * FROM time WHERE ID=? AND NAME=?", (id, name)).fetchone()
    if a:
        logger_info(a)
        # Refresh the timestamp; also match NAME so only this entry is updated.
        self.c.execute(
            "UPDATE time SET TIME=datetime('now') WHERE ID=? AND NAME=?",
            (id, name))
        self.conn.commit()
    else:
        self.c.execute("INSERT INTO time (ID, NAME) VALUES (?, ?)", (id, name))
        self.conn.commit()
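# Hedged illustration: a minimal sketch of the `time` table that write_time()
# and check_time() rely on. The project's real schema is defined elsewhere;
# the column types and the DEFAULT below are assumptions inferred from the
# INSERT above (which omits TIME) and from check_time() reading row index 2
# as a "%Y-%m-%d %H:%M:%S" string.
def _init_time_table_sketch(conn):
    # Hypothetical helper for illustration only, not part of the original module.
    conn.execute(
        "CREATE TABLE IF NOT EXISTS time "
        "(ID TEXT, NAME TEXT, TIME TEXT DEFAULT (datetime('now')))")
    conn.commit()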
async def check_news():
    logger_info('Checking Minecraft news...')
    baseurl = 'https://www.minecraft.net'
    url = 'https://www.minecraft.net/content/minecraft-net/_jcr_content.articles.grid?tileselection=auto&tagsPath=minecraft:article/news,minecraft:article/insider,minecraft:article/culture,minecraft:article/merch,minecraft:stockholm/news,minecraft:stockholm/guides,minecraft:stockholm/deep-dives,minecraft:stockholm/merch,minecraft:stockholm/events,minecraft:stockholm/minecraft-builds,minecraft:stockholm/marketplace&offset=0&count=500&pageSize=10'
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            status = resp.status
            if status == 200:
                nws = json.loads(await resp.read())['article_grid']
                for article in nws:
                    default_tile = article['default_tile']
                    title = default_tile['title']
                    image = baseurl + default_tile['image']['imageURL']
                    desc = default_tile['sub_header']
                    link = baseurl + article['article_url']
                    date = article['publish_date']
                    # Skip articles that are already recorded in the database.
                    q = database.check_exist(title)
                    if not q:
                        database.add_news(title, link, desc, image, date)
                        articletext = f'Minecraft官网发布了新的文章:\n{title}\n{link}\n{desc}\n'
                        msgchain = MessageChain.create([
                            Plain(articletext),
                            Image.fromNetworkAddress(image)
                        ])
                        # Broadcast to every group and friend with the module enabled.
                        for qqgroup in check_enable_modules_all(
                                'group_permission', 'minecraft_news'):
                            try:
                                await app.sendGroupMessage(int(qqgroup), msgchain)
                                await asyncio.sleep(0.5)
                            except Exception:
                                traceback.print_exc()
                        for qqfriend in check_enable_modules_all(
                                'friend_permission', 'minecraft_news'):
                            try:
                                await app.sendFriendMessage(int(qqfriend), msgchain)
                                await asyncio.sleep(0.5)
                            except Exception:
                                traceback.print_exc()
                        logger_info(articletext)
                logger_info('Minecraft news checked.')
            else:
                # status is an int; convert it to avoid the str + int TypeError.
                logger_info('Check minecraft news failed: ' + str(status))
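# Hedged illustration of the response shape check_news() expects from the
# content grid endpoint. The field names are the ones the parsing code above
# reads; the values are made up for this example.
_example_article_grid = {
    'article_grid': [
        {
            'default_tile': {
                'title': 'Example article title',
                'image': {'imageURL': '/content/dam/example.jpg'},
                'sub_header': 'Example description',
            },
            'article_url': '/article/example',
            'publish_date': '2021-01-01T00:00:00Z',  # example value only
        }
    ]
}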
def check_time(self, kwargs, name, delay: int):
    # Resolve the sender's QQ ID from either a group or a friend context.
    if Group in kwargs:
        id = kwargs[Member].id
    if Friend in kwargs:
        id = kwargs[Friend].id
    a = self.c.execute(
        "SELECT * FROM time WHERE ID=? AND NAME=?", (id, name)).fetchone()
    if a:
        logger_info(a)
        logger_info(
            datetime.datetime.strptime(a[2], "%Y-%m-%d %H:%M:%S").timestamp())
        logger_info(datetime.datetime.now().timestamp())
        # The stored timestamp comes from SQLite's datetime('now'), which is UTC;
        # add 8 hours to convert it to local (UTC+8) time before comparing.
        check = (datetime.datetime.strptime(a[2], "%Y-%m-%d %H:%M:%S") +
                 datetime.timedelta(hours=8)).timestamp() - \
            datetime.datetime.now().timestamp()
        logger_info(check)
        # `check` is roughly the negative number of seconds since the record was
        # written; return it while the record is newer than `delay` seconds,
        # otherwise the cooldown has expired.
        if check > -delay:
            return check
        return False
    return False
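# Hedged usage sketch: how check_time()/write_time() combine into a per-user
# cooldown. `database` is assumed to be the same helper object used in
# check_news(); the 60-second delay and the 'wiki' command name are made up
# for illustration, not values from the original project.
async def _cooldown_example(kwargs):
    remaining = database.check_time(kwargs, 'wiki', 60)
    if remaining is not False:
        # Still inside the 60-second window; `remaining` is roughly the negative
        # number of seconds since the last call.
        return 'Still cooling down, please try again later.'
    database.write_time(kwargs, 'wiki')
    # ...run the actual command here...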
async def check_weekly():
    logger_info('Checking MCWZH weekly...')
    result = json.loads(await get_url(
        'https://minecraft.fandom.com/zh/api.php?action=parse&page=Minecraft_Wiki/weekly&prop=text|revid&format=json'
    ))
    html = result['parse']['text']['*']
    text = re.sub(r'<p>', '\n', html)  # turn paragraph tags into line breaks
    text = re.sub(r'<(.*?)>', '', text, flags=re.DOTALL)  # strip all remaining HTML tags
    text = re.sub(r'\n\n\n', '\n\n', text)  # collapse unnecessary blank lines
    text = re.sub(r'\n*$', '', text)
    img = re.findall(
        r'(?<=src=")(.*?)(?=/revision/latest/scale-to-(width|height)-down/\d{3}\?cb=\d{14}?")',
        html)
    page = re.findall(r'(?<=<b><a href=").*?(?=")', html)
    sended_img = Image.fromNetworkAddress(
        img[0][0]) if img else Plain('\n(发生错误:图片获取失败)')
    # When the page still links to the Glass article, the weekly page has not
    # been updated yet, so report it as outdated.
    if page[0] == '/zh/wiki/%E7%8E%BB%E7%92%83':
        msg = '发生错误:本周页面已过期,请联系中文 Minecraft Wiki 更新。'
    else:
        msg = '本周的每周页面:\n\n' + text + '\n图片:' + img[0][0] + \
            '?format=original\n\n页面链接:https://minecraft.fandom.com' + page[0] + \
            '\n每周页面:https://minecraft.fandom.com/zh/wiki/?oldid=' + \
            str(result['parse']['revid'])
    chain = MessageChain.create([Plain(msg), sended_img])
    for qqgroup in check_enable_modules_all('group_permission', 'weekly_rss'):
        try:
            await app.sendGroupMessage(int(qqgroup), chain)
            await asyncio.sleep(0.5)
        except Exception:
            traceback.print_exc()
    for qqfriend in check_enable_modules_all('friend_permission', 'weekly_rss'):
        try:
            await app.sendFriendMessage(int(qqfriend), chain)
            await asyncio.sleep(0.5)
        except Exception:
            traceback.print_exc()
    logger_info(msg)
    logger_info('Weekly checked.')
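# Hedged illustration only: one way check_news() and check_weekly() could be
# driven periodically. The project's actual scheduler is not shown in this
# section; this loop and the 30-minute interval are assumptions for the example.
async def _poll_loop_sketch():
    while True:
        try:
            await check_news()
            await check_weekly()
        except Exception:
            traceback.print_exc()
        await asyncio.sleep(30 * 60)  # assumed interval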
async def get_infobox_pic(link, pagelink, headers):
    if not infobox_render:
        return False
    try:
        logger_info('Starting find infobox..')
        wlink = re.sub(r'api.php', '', link)
        link = re.sub(r'(?:w/|)api.php', '', link)
        try:
            async with aiohttp.ClientSession(headers=headers) as session:
                async with session.get(
                        pagelink,
                        timeout=aiohttp.ClientTimeout(total=20)) as req:
                    html = await req.read()
        except Exception:
            traceback.print_exc()
            return False
        soup = BeautifulSoup(html, 'html.parser')
        pagename = uuid.uuid4()
        url = os.path.abspath(f'./cache/{pagename}.html')
        if os.path.exists(url):
            os.remove(url)
        logger_info('Downloaded raw.')
        open_file = open(url, 'a', encoding='utf-8')
        # Try the known infobox class names one by one until a match is found.
        find_infobox = None
        for infobox_class in ('notaninfobox', 'portable-infobox', 'infobox',
                              'tpl-infobox', 'infoboxtable', 'infotemplatebox',
                              'skin-infobox', 'arcaeabox'):  # arcaeabox: arcw
            find_infobox = soup.find(class_=infobox_class)
            if find_infobox is not None:
                break
        if find_infobox is None:
            # No known infobox on this page; give up.
            return False
        logger_info('Find infobox, start modding...')
        # Copy the page's stylesheets so the rendered snippet keeps its styling.
        for x in soup.find_all(rel='stylesheet'):
            if x.has_attr('href'):
                x.attrs['href'] = re.sub(';', '&', urljoin(wlink, x.get('href')))
                open_file.write(str(x))
        for x in soup.find_all('style'):
            open_file.write(str(x))

        def join_url(base, target):
            # Make every path-like token of a (possibly space-separated) attribute absolute.
            target = target.split(' ')
            targetlist = []
            for x in target:
                if x.find('/') != -1:
                    x = urljoin(base, x)
                    targetlist.append(x)
                else:
                    targetlist.append(x)
            target = ' '.join(targetlist)
            return target

        # Rewrite relative links, images and inline styles to absolute URLs.
        for x in find_infobox.find_all(['a', 'img', 'span']):
            if x.has_attr('href'):
                x.attrs['href'] = join_url(link, x.get('href'))
            if x.has_attr('src'):
                x.attrs['src'] = join_url(link, x.get('src'))
            if x.has_attr('srcset'):
                x.attrs['srcset'] = join_url(link, x.get('srcset'))
            if x.has_attr('style'):
                x.attrs['style'] = re.sub(r'url\(/(.*)\)',
                                          'url(' + link + '\\1)',
                                          x.get('style'))
        open_file.write('<body class="mw-parser-output">')
        open_file.write(str(find_infobox))
        open_file.write('</body>')
        if find_infobox.parent.has_attr('style'):
            open_file.write(join_url(link, find_infobox.parent.get('style')))
        # Keep spoiler ("heimu") text readable in the rendered image.
        open_file.write('<style>span.heimu a.external,\
span.heimu a.external:visited,\
span.heimu a.extiw,\
span.heimu a.extiw:visited {\
color: #252525;\
}\
.heimu,\
.heimu a,\
a .heimu,\
.heimu a.new {\
background-color: #cccccc;\
text-shadow: none;\
}</style>')
        open_file.close()
        read_file = open(url, 'r', encoding='utf-8')
        html = {'content': read_file.read()}
        logger_info('Start rendering...')
        picname = os.path.abspath(f'./cache/{pagename}.jpg')
        if os.path.exists(picname):
            os.remove(picname)
        # POST the assembled HTML to the external render service and save the screenshot.
        async with aiohttp.ClientSession() as session:
            async with session.post(infobox_render,
                                    headers={
                                        'Content-Type': 'application/json',
                                    },
                                    data=json.dumps(html)) as resp:
                with open(picname, 'wb+') as jpg:
                    jpg.write(await resp.read())
        return picname
    except Exception:
        traceback.print_exc()
        return False
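# Hedged usage sketch for get_infobox_pic(): render the infobox of a MediaWiki
# page to an image. The wiki API URL, page link and User-Agent below are
# placeholders for illustration, not values from the original project.
async def _infobox_example():
    pic = await get_infobox_pic(
        'https://minecraft.fandom.com/zh/api.php',
        'https://minecraft.fandom.com/zh/wiki/%E7%8E%BB%E7%92%83',
        {'User-Agent': 'example-bot/1.0'})
    if pic:
        # `pic` is the absolute path of the rendered screenshot in ./cache/;
        # False means no infobox was found or rendering failed.
        logger_info(f'Rendered infobox to {pic}')
    return pic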