def __loadCover(self, ps: PictureSource):
    """Fetch the cover picture described by *ps* and show it in the
    ``self.playing_cover`` widget, resized to 100x100.

    Falls back to ``self.empty_image`` when *ps* is None, and leaves the
    current cover untouched when the HTTP request fails.
    """
    if ps is None:  # no cover available -> show the placeholder
        self.playing_cover.configure(image=self.empty_image)
        # keep a reference on the widget so Tk does not garbage-collect it
        self.playing_cover.image = self.empty_image
        return
    resp = vhttp.httpGet(ps.url, headers=ps.headers)
    if resp is None:  # httpGet can return None on failure (see download())
        return
    img = ImageTk.PhotoImage(
        Image.open(io.BytesIO(resp.content)).resize((100, 100)))
    self.playing_cover.configure(image=img)
    # same reference trick as above
    self.playing_cover.image = img
def getMyNew(self, cookie=None):
    """Return the ``desc`` dicts of all dynamics newer than the last one
    seen, updating ``self.current`` to the newest dynamic id.

    Returns an empty list when the request fails or nothing is new.

    The cookie now defaults lazily to ``Config.getCookie("bilibili")``:
    the original evaluated it once at import time, freezing a possibly
    stale cookie for the lifetime of the process.
    """
    if cookie is None:
        cookie = Config.getCookie("bilibili")
    resp = httpGet(self.dynamicApi, cookies=cookie)
    if resp is None:
        return []
    resp = resp.json()
    if resp["code"] == 0 and len(resp["data"]["cards"]) > 0:
        tmp = self.current
        # remember the newest id before filtering against the old one
        self.current = resp["data"]["cards"][0]["desc"]["dynamic_id"]
        return [c["desc"] for c in resp["data"]["cards"]
                if c["desc"]["dynamic_id"] > tmp]
    return []
def download(self, url, route, filename, **kwargs):
    """Download *url* into directory *route* under a sanitized *filename*.

    Extra keyword arguments are forwarded to ``vhttp.httpGet``.
    Returns True on success, False when the HTTP request failed.
    """
    fn = file.parseFilename(filename)
    raw_data = vhttp.httpGet(url, **kwargs)
    if raw_data is None:
        return False
    # makedirs creates intermediate directories and is race-free with
    # exist_ok (the original os.mkdir failed on nested routes and raced
    # with the preceding os.path.exists check).
    os.makedirs(route, exist_ok=True)
    path = os.path.join(route, fn)
    with open(path, "wb+") as f:
        f.write(raw_data.content)
    return True
def getQrcode(self):
    """Request a login QR code and present it to the user.

    Stores the returned oauth key on ``self.oauthKey`` for later polling.
    In console mode the QR code is rendered as ASCII art; otherwise it is
    saved to ./qrcode.png and handed to the system shell to open.
    """
    payload = httpGet(self.qrApi).json()["data"]
    self.oauthKey = payload["oauthKey"]
    qc = qrcode.QRCode()
    qc.add_data(payload["url"])
    if self.console:
        qc.print_ascii(invert=self.invert)
    else:
        qc.make_image().save("./qrcode.png")
        os.system("qrcode.png")
def load(self, **kwargs):
    """Resolve the real room id (when the room is live) and scrape the
    room title from the live page HTML.
    """
    container = JsonResponseContainer(
        liveApi.getLiveInfo(self.rid),
        # fixed internal key typo: was "live_stauts" (block-local name,
        # it only maps the kwarg to the "data.live_status" json path)
        live_status="data.live_status",
        room_id="data.room_id")
    if container.data["live_status"] == 1:
        self.room_id = container.data["room_id"]
    res = httpGet(self.baseUrl % self.rid, headers={
        "origin": "www.bilibili.com",
        # NOTE(review): every other use of baseUrl here takes self.rid;
        # the original formatted the referer with self.id — confirm
        # whether that attribute exists or was a typo for self.rid.
        "referer": self.baseUrl % self.id,
        "user-agent": Config.commonHeaders["user-agent"],
    }).content.decode("utf-8")
    tp = r"<title id=\"link-app-title\">.*<\/title>"
    # search once instead of twice (original repeated the re.search call)
    m = re.search(tp, res)
    if m:
        # strip the surrounding <title ...> / </title> tags
        self.title = m.group()[27:-8:]
def search(cls, keyword, page=1, *args, **kwargs):
    # todo: container for beautifulsoup?
    """Search imomoe for *keyword* and return one page of results.

    Returns a SearchResults carrying the current/total page numbers, or
    None when the page marker is absent from the response HTML.
    """
    url = imomoeApi.API.search_api(keyword, page)
    html_text = formats.htmlAutoDecode(httpGet(url).content)
    pg = re.search(r"页次:[0-9]+/[0-9]+页", html_text)
    if pg is None:
        return None
    # marker looks like "页次:<cp>/<tp>页" -> strip label/suffix, split
    pagenum = pg.group()[3:-1].split("/")
    cp, tp = int(pagenum[0]), int(pagenum[1])
    soup = BeautifulSoup(html_text, "html.parser")
    rs = []
    for li in soup.find("div", {"class": "pics"}).find_all("li"):
        # hoist the result url (the original built it twice per item)
        href = cls.base_url + li.a["href"][1:]
        rs.append(SearchResult(href, Config.commonHeaders,
                               li.h2.a["title"],
                               cls.initFromUrl(href),
                               cls.getSourceName(), "video"))
    return SearchResults(rs, cp, tp)
def load(self, **kwargs):
    """Load episode metadata for this kakadm video.

    Populates ``self.title``, ``self.pid_list`` (episode numbers as
    strings, 1-based) and ``self.src`` (the vid consumed by src_api).
    """
    # title/movurls are scraped from the player page with \r\n stripped
    container = RegExpResponseContainer(
        kakadmApi.getVideoInfo(self.aid, self.pid),
        strip=["\r", "\n"],
        title=(r"play_title=\"((?!\";).)*\";", lambda x: x[12:-2:]),
        movurls=r"<div class=\"movurls\">((?!</div>).)*</div>")
    self.title = container.data["title"]
    # one entry per <li> in the episode list block
    self.pid_list = [
        str(i)
        for i in range(1, container.data["movurls"].count("</li>") + 1)
    ]
    data_html = httpGet(
        self.src_api.format(aid=self.aid, pid=self.pid),
        cookies=Config.getCookie("kakadm")).content.decode("utf-8")
    m = re.search(r"vid=(.*)\'", data_html)
    # group(1) is the text between "vid=" and the closing quote —
    # identical to the original group()[4:-1] slice, just clearer
    self.src = m.group(1)