async def send(self, app: Slave, yummy: list, group: Group, prefix: str):
    """Send the first picture entry in *yummy* to *group*, cache it, and schedule recall.

    Args:
        app: bot connection used to deliver the group message.
        yummy: non-empty list of picture entries; only ``yummy[0]`` is sent.
        group: destination group.
        prefix: prefix used in the success log line.

    Raises:
        asyncio.TimeoutError: when downloading the picture times out.
        ValueError: when the picture fails the size check.
    """
    try:
        yande: PictureRipperListener.dataClass = yummy[0]
        img_byte: bytes = await yande.get()
        msg = [Image.fromUnsafeBytes(img_byte)]
        # ehentai entries additionally carry their gallery id/token as plain text.
        if self.ym == "ehentai" and hasattr(yande, 'gid'):
            msg.append(Plain(f'{yande.gid}/{yande.token}'))
        with enter_message_send_context(UploadMethods.Group):
            msg_chain = await MeCh.create(msg).build()
        image: Image = msg_chain.__root__[0]
        bot_message = await app.sendGroupMessage(group, msg_chain)
        # At(sender.id), Plain(prefix_ + data_.purl),
        # Bound the cache: evict the oldest entry (dict preserves insertion order).
        if len(self.GCache) >= 150:
            self.GCache.pop(next(iter(self.GCache)))
            logger.info('Cache is full,pop first one')
        ext = yande.url.split('.')[-1]
        # Cache key packs the group id and the message id into one integer so a
        # recalled message can be looked up later.
        self.GCache[(group.id << 32) + bot_message.messageId] = [
            image, ext, yande.id, yande.__class__.__name__
        ]
        logger.info(f"{prefix}sent,tags:{yande.tags}")
        # Auto-recall the sent picture after 60 seconds.
        await self.reCallYms(app, bot_message.messageId, 60)
    except asyncio.TimeoutError as e:
        logger.exception("[YummyPictures]: " + 'Timeout' + str(e))
        raise  # bare raise preserves the original traceback
    except ValueError as e:
        logger.exception("[YummyPictures]: " + 'Size check failed' + str(e))
        raise
async def save(self):
    """Serialize ``self.database`` to ``self.databaseFile`` as indented JSON.

    Raises:
        IOError: when the database file cannot be written.
    """
    try:
        # Context manager guarantees the handle is closed even if the write fails
        # (the original leaked the handle on a mid-write error).
        with open(self.databaseFile, 'w', encoding='utf-8') as f:
            f.write(json.dumps(self.database, indent=1, ensure_ascii=False))
        logger.info(f"save {self.model} database successfully.")
    except IOError as e:
        raise e
def getRating(self, source, group, force=False):
    """Resolve and cache the strictest applicable rating tag for *group*.

    Args:
        source: config section name holding the source's rating table.
        group: group id the rating applies to.
        force: recompute even if a cached rating exists.
    """
    if group not in self.ratings or force:
        # Hoist the repeated config lookups out of the conditional expression.
        setting = ymConfig.getConfig('setting')
        group_rate = setting.get('group_rate')
        # A per-group override wins over the global default rating level.
        level = group_rate[str(group)] if str(group) in group_rate else setting.get('rating')
        # Keep only the ratings at or below the allowed level, then take the
        # highest remaining one. NOTE(review): raises IndexError if no rating
        # is <= level — presumably the config always provides at least one.
        rs = {k: v for k, v in ymConfig.getConfig(source).get('rating').items() if v <= level}
        rating = sorted(rs.items(), key=lambda d: d[1], reverse=True)
        self.ratings[group] = rating[0][0]
        logger.info(f"rating {group} is {self.ratings[group]}")
async def addYummy(self, yummy: YummyData):
    """Insert *yummy* into the database (and persist it) unless its id already exists.

    Args:
        yummy: entry to store; keyed by ``str(yummy.id)``.
    """
    exist: dict = await self.find(str(yummy.id))
    # Truthiness check replaces the original `not len(exist)`.
    if not exist:
        self.database[str(yummy.id)] = yummy.__dict__
        await self.save()
        logger.info(
            f"add and save {str(yummy.id)} to {self.model} database")
    else:
        logger.info(f"{str(yummy.id)} already in {self.model} database")
def load(self):
    """Load ``self.database`` from ``self.databaseFile``, creating an empty one first if absent."""
    # First run: seed the file with an empty JSON object.
    if not os.path.exists(self.databaseFile):
        with open(self.databaseFile, 'w') as f:
            f.write(json.dumps({}))
    # Context managers replace the original manual open/close pairs.
    with open(self.databaseFile, 'r', encoding='utf-8') as f:
        self.database = json.loads(f.read())
    logger.info(f"read {self.model} database from local dict")
async def get(self) -> list:
    """Fetch ``self.url`` and return the JSON ``results`` list.

    Returns:
        The ``results`` value of the JSON response body.

    Raises:
        asyncio.TimeoutError: when the request exceeds the 600 s timeout.
        ValueError: when the response body is not valid JSON.
    """
    headers = {}
    try:
        async with aiohttp.request('GET', self.url, headers=headers,
                                   connector=self.connector,
                                   timeout=aiohttp.ClientTimeout(600)) as resp:
            logger.info(self.url)
            result = json.loads(await resp.read())['results']
    finally:
        # Close the connector on both success and failure paths — the original
        # only closed it on success, leaking it when the request raised.
        if self.connector:
            await self.connector.close()
    return result
async def getList(self, num: int = 3):
    """Scrape up to *num* magnet-link entries from the search page at ``self.url``.

    Args:
        num: maximum number of entries to collect.

    Returns:
        For ``self.source == 2``: a list of ``{'text': ...}`` dicts, each holding
        title, info line and magnet link. Other sources fall through and return
        ``None``.

    Raises:
        asyncio.TimeoutError: when the request exceeds the 600 s timeout.
        ValueError: propagated from response handling.
    """
    headers = {}
    try:
        async with aiohttp.request(
                'GET', self.url, headers=headers, connector=self.connector,
                timeout=aiohttp.ClientTimeout(600)) as resp:
            logger.info(self.url)
            soup = BeautifulSoup(await resp.read(), "lxml")
            # Dead code kept as-is: an abandoned scraper for source 0.
            '''
            if self.source == 0:
                movies = soup.find_all("a", class_="movie-box")
                for movie in movies:
                    frame = movie.contents[1].contents[1]
                    detailLink = movie['href']
                    picLink = frame['src']
                    picInfo = frame['title']
                    async with aiohttp.request('GET', detailLink, headers=headers,
                                               connector=self.connector,
                                               timeout=aiohttp.ClientTimeout(600)) as res:
                        subSoup = BeautifulSoup(await res.read(), "lxml")
                        subs = subSoup.find_all("td")
                        info(detailLink)
                        print(subs)
                return
            '''
            # Source 2: results live in a "data-list" container; entries are
            # every other child starting at index 3 (positions are layout-specific).
            if self.source == 2:
                sear = soup.find_all("div", class_="data-list")
                result = []
                if sear:
                    movies = sear[0].contents[3::2]
                    for movie in movies:
                        if num < 1:
                            break
                        num -= 1
                        # The btih hash is the last path segment of the entry link.
                        magnet = 'magnet:?xt=urn:btih:' + movie.contents[
                            1]['href'].split('/')[-1]
                        title = movie.contents[1]['title']
                        intel = movie.contents[1].contents[3].string
                        result.append(
                            {'text': f'{title}\n{intel}\n{magnet}\n\n'})
                if self.connector:
                    await self.connector.close()
                return result
    except (asyncio.TimeoutError, ValueError) as e:
        raise e
    # Unhandled sources return None.
    return
async def get(self, offset: int = 0, limit: int = 3) -> list:
    """Return search results for this URL.

    Args:
        offset: unused; kept for interface compatibility.
        limit: maximum number of scraped entries for list-based sources.

    Returns:
        Scraped entries for sources 0 and 2, otherwise the JSON ``results`` list.

    Raises:
        asyncio.TimeoutError: when the request times out.
        ValueError: when the response cannot be parsed.
    """
    scraped = await self.getList(limit)
    # Sources 0 and 2 are served entirely by the HTML scraper.
    if self.source in (0, 2):
        return scraped
    headers = {}
    try:
        data = await requestText(self.url,
                                 headers=headers,
                                 connector=self.connector,
                                 raw=False)
        logger.info(self.url)
        payload = data['results']
        if self.connector:
            await self.connector.close()
    except (asyncio.TimeoutError, ValueError) as err:
        raise err
    return payload
def __str__(self):
    """Return the error text (also written to the log).

    Bug fix: the original only logged and implicitly returned ``None``,
    which makes ``str(obj)`` raise ``TypeError`` — ``__str__`` must return a str.
    """
    message = "Can not find searcher key,check setting.json"
    logger.info(message)
    return message
def __str__(self):
    """Return the error text (also written to the log).

    Bug fix: the original only logged and implicitly returned ``None``,
    which makes ``str(obj)`` raise ``TypeError`` — ``__str__`` must return a str.
    """
    message = "Can not find proxy,check setting.json"
    logger.info(message)
    return message