def getInfo(self, maxNum=1000, **kwargs):
    """Collect videos from this favorites folder (self.media_id) into self.videos.

    Pages through self.favApi until the folder reports media_count == 0,
    stopping early once maxNum videos have been collected or a request fails.
    """
    self.clearData()
    pn = 1
    num = 0
    while True:
        data = httpGet(self.favApi % (self.media_id, pn),
                       cookies=Config.commonCookies,
                       headers=Config.commonHeaders)
        if data is None:  # network failure: keep whatever was collected so far
            return
        data = data.json()
        if data["data"]["info"]["media_count"] == 0:
            break
        for media in data["data"]["medias"]:
            if num >= maxNum:
                return
            v = biliVideo.initFromData(media["bvid"], media["title"],
                                       media["upper"]["name"],
                                       media["cover"], [])
            # attr == 0 appears to mean the video is still available;
            # anything else is marked unavailable with status 404.
            if media["attr"] == 0:
                v.getPages()
            else:
                v.status = 404
            self.videos.append(v)
            num += 1
        pn += 1
def future_trades(self, symbol, contractType):
    """GET /api/v1/future_trades.do — recent trades for a futures contract.

    symbol / contractType are omitted from the query string when falsy.
    """
    FUTURE_TRADES_RESOURCE = "/api/v1/future_trades.do"
    # BUG FIX: the original appended 'contract_type=' + symbol (not
    # contractType) when symbol was empty. Building the query from a
    # list also removes the fragile params-empty special-casing.
    parts = []
    if symbol:
        parts.append('symbol=' + symbol)
    if contractType:
        parts.append('contract_type=' + contractType)
    return httpGet(self.__url, FUTURE_TRADES_RESOURCE, '&'.join(parts))
def getPages(self):
    """Populate self.pages with {page, pagename, cid} entries for this video."""
    resp = httpGet(self.pagesApi % self.bid)
    if resp is None:
        return
    payload = resp.json()  # parse the response once instead of twice
    if payload["code"] != 0:
        return
    self.pages = [{"page": d["page"], "pagename": d["part"], "cid": d["cid"]}
                  for d in payload["data"]]
def getQualities(self):
    """Return {type: (tag, bps, desc)} for this audio, or {} on failure."""
    data = httpGet(self.fileApi % ("2", self.sid), headers=self.headers)
    if data is None:
        return {}
    payload = data.json()
    return {q["type"]: (q["tag"], q["bps"], q["desc"])
            for q in payload["data"]["qualities"]}
def future_depth(self, symbol, contractType, size):
    """GET /api/v1/future_depth.do — order-book depth for a futures contract.

    symbol / contractType / size are omitted from the query when falsy.
    """
    FUTURE_DEPTH_RESOURCE = "/api/v1/future_depth.do"
    # BUG FIX: the original appended 'contract_type=' + symbol (not
    # contractType) when symbol was empty. A list join replaces the
    # error-prone params-empty branching.
    parts = []
    if symbol:
        parts.append('symbol=' + symbol)
    if contractType:
        parts.append('contract_type=' + contractType)
    if size:
        parts.append('size=' + size)
    return httpGet(self.__url, FUTURE_DEPTH_RESOURCE, '&'.join(parts))
def download(self, url, route, filename, **kwargs):
    """Download url into route/filename (sanitized via filenameparser).

    Returns True on success, False when the request fails.
    Extra kwargs are forwarded to httpGet.
    """
    fn = filenameparser(filename)
    raw_data = httpGet(url, **kwargs)
    if raw_data is None:
        return False
    # makedirs(exist_ok=True) avoids the check-then-create race of
    # os.path.exists + os.mkdir and also creates missing parent dirs.
    os.makedirs(route, exist_ok=True)
    path = os.path.join(route, fn)
    with open(path, "wb") as f:  # "wb" suffices; the "+" (read) mode was unused
        f.write(raw_data.content)
    return True
def getInfo(self, **kwargs):
    """Fetch title/uploader/cover for this video and load its page list."""
    data = httpGet(self.detailApi % self.bid)
    if data is None:
        return
    data = data.json()
    try:
        view = data["data"]["View"]
        self.title = view["title"]
        self.uploader = view["owner"]["name"]
        self.cover = view["pic"]
        self.getPages()
    except (KeyError, TypeError):
        # Error/malformed response body: leave fields unset (best-effort),
        # but no longer swallow unrelated exceptions with a bare except.
        pass
def getInfo(self, **kwargs):
    """Fetch title/author/lyric/cover for this audio (self.sid)."""
    data = httpGet(self.infoApi % self.sid, headers=self.headers)
    if data is None:
        return
    data = data.json()
    try:
        info = data["data"]
        self.title = info["title"]
        self.uploader = info["author"]
        self.lyric = info["lyric"]
        self.cover = info["cover"]
    except (KeyError, TypeError):
        # Error/malformed response body: best-effort, keep whatever was set;
        # narrowed from the original bare except.
        pass
def getQualities(self, page=1):
    """Return {qn: (format, description)} available for the given page, or {}."""
    cid = self.getPageCid(page)
    if cid == 0:
        return {}
    data = httpGet(self.playurlApi % (self.bid, cid, 32))
    if data is None:
        return {}
    info = data.json()["data"]
    formats = info["accept_format"].split(",")
    # accept_quality / accept_format / accept_description are parallel arrays;
    # zip them instead of indexing by position.
    return {qn: (fmt, desc)
            for qn, fmt, desc in zip(info["accept_quality"], formats,
                                     info["accept_description"])}
def getInfo(self, maxNum=1000, **kwargs):
    """Page through this user's audio list, appending up to maxNum biliAudio
    objects to self.audio. Stops on request failure or at the last page.
    """
    self.clearData()
    num = 0
    pn = 1
    # (removed unused local: api = self.infoApi % (self.sid, "%s"))
    while True:
        data = httpGet(self.infoApi % (self.sid, pn),
                       headers=Config.commonHeaders)
        if data is None:
            return
        data = data.json()
        for audio in data["data"]["data"]:
            if num >= maxNum:
                return
            self.audio.append(biliAudio.initFromData(
                audio["id"], audio["title"], audio["author"],
                audio["lyric"], audio["cover"]))
            num += 1
        if data["data"]["pageCount"] == data["data"]["curPage"]:
            break
        pn += 1
def getPlayurl(self, page=0, qn=116):
    """Return {"qn", "format", "urls"} for a page (0 = current page), or {}.

    urls contains each segment's url plus its backup_url when present.
    """
    if page == 0:
        page = self.currentPage
    cid = self.getPageCid(page)
    if cid == 0:
        return {}
    data = httpGet(self.playurlApi % (self.bid, cid, qn),
                   headers=Config.commonHeaders,
                   cookies=Config.commonCookies)
    if data is None:
        return {}
    info = data.json()["data"]
    urls = []
    for u in info["durl"]:
        urls.append(u["url"])
        if u["backup_url"] is not None:
            urls.append(u["backup_url"])
    return {"qn": info["quality"], "format": info["format"], "urls": urls}
def getInfo(self, **kwargs):
    """Resolve a bangumi page into title, cover and episode list (self.pages).

    Also rebinds self.bid to the first episode's bvid and points
    self.currentPage at the episode named in the URL (ep<id>), default 1.
    """
    if self.bid == "":
        return
    url = self.bangumiUrl % self.bid
    rawhtml = httpGet(url)
    if rawhtml is None:
        return
    try:
        rawhtml = rawhtml.text
        # __INITIAL_STATE__ is JSON embedded in the page's JS; the slice
        # strips the "__INITIAL_STATE__=" prefix and the trailing ";".
        initial_state = json.loads(
            re.search(r"__INITIAL_STATE__={(.*?)]};", rawhtml).group()[18:-1])
        self.title = initial_state["mediaInfo"]["title"]
        self.cover = "https:" + initial_state["mediaInfo"]["cover"]
        eplist = initial_state["epList"]
        self.bid = eplist[0]["bvid"]
        self.currentPage = 1
        pages = []
        epid = ""
        if "ep" in url:
            epid = str(re.search(r"ep[0-9]+", url).group()[2:])
        for index, ep in enumerate(eplist, start=1):
            pages.append({
                "page": index,
                "pagename": "%s %s" % (ep["titleFormat"], ep["longTitle"]),
                "cid": ep["cid"],
            })
            if epid == str(ep["id"]):
                self.currentPage = index
        self.pages = pages
    except Exception as e:
        # Parsing is best-effort: log and keep whatever state was set.
        # (Removed the dead `pass` that followed the print.)
        print(repr(e))
def getCdns(self, quality=2):
    """Return the list of CDN urls for this audio, or [] on request failure."""
    data = httpGet(self.fileApi % (quality, self.sid), headers=self.headers)
    if data is None:
        return []
    return data.json()["data"]["cdns"]
def uploadTemperature(self, serial, temperature):
    """Report a temperature reading for `serial` to the monitoring endpoint.

    A reading of exactly 0.0 is treated as "no data" and skipped.
    """
    if temperature == 0.0:
        return
    url = "http://monitor.shajen.pl/api/temp/add?serial=%s&temperature=%.2f&key=%s" % (
        serial, temperature, config.UPLOADER_KEY)
    utils.httpGet(url)
def future_index(self, symbol):
    """GET /api/v1/future_index.do — index price for the given symbol."""
    FUTURE_INDEX = "/api/v1/future_index.do"
    query = 'symbol=' + symbol if symbol else ''
    return httpGet(self.__url, FUTURE_INDEX, query)
def exchange_rate(self):
    """GET /api/v1/exchange_rate.do — current exchange rate (no parameters)."""
    resource = "/api/v1/exchange_rate.do"
    return httpGet(self.__url, resource, '')
def future_estimated_price(self, symbol):
    """GET /api/v1/future_estimated_price.do — estimated delivery price."""
    FUTURE_ESTIMATED_PRICE = "/api/v1/future_estimated_price.do"
    query = 'symbol=' + symbol if symbol else ''
    return httpGet(self.__url, FUTURE_ESTIMATED_PRICE, query)