def down_images_from_ppy(uids):
    logging.info('image down:%s' % str(uids))
    imgs_url = get_images_ppy_url2(uids)
    r = interRequest.interReq()
    for idx, img_url in enumerate(imgs_url):
        res = r.down_image(uids[idx], url=img_url, path=img_path)
        logging.info('image download result: %s' % res)
def down_bg(bid='1028215', sid='480609'):
    iq = interRequest.interReq()
    # work out the background image URL
    bgUrl = cal_bg_url(bid, sid)
    if bgUrl == 0:
        return 0
    return iq.down_image(iname=bid, url=bgUrl, path=bg_path, verify=False)
def get_images_ppy_url(uids):
    raw_url = "http://www.int100.org/api/get_avatars.php?u={u}"
    r = interRequest.interReq()
    ustr = ','.join(uids)
    url = raw_url.format(u=ustr)
    logging.info(url)
    res = r.get(url)
    # drop the trailing separator before splitting into individual URLs
    body = res.text[:-1]
    return body.split(',')
def crawlPageByGet(api, **kw):
    # generic page-fetch helper
    ret = None
    req = interRequest.interReq()
    url = ref[api].format(**kw)
    res = req.get(url)
    logging.info('crawlPageByGet url:%s|status:%s|', url, res.status_code)
    if res.status_code == 200 and res.text:
        ret = res.text
    return ret
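# Note added for illustration, not part of the original module: `ref` is a
# module-level dict defined elsewhere in this file that maps an API name to a
# URL template; crawlPageByGet formats that template with the keyword
# arguments it receives. A hypothetical entry and call might look like:
#   ref = {'user_page': 'https://example.com/users/{uid}'}
#   html = crawlPageByGet('user_page', uid='12345')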
def downImage(url, iname, useCache=1):
    r = interRequest.interReq()
    res = 1
    if useCache:
        if not os.path.exists(img_path + iname + '.jpg'):
            res = r.down_image(iname, url=url, path=img_path)
    else:
        res = r.down_image(iname, url=url, path=img_path)
    if res == 1 and ".jpg" not in iname:
        return iname + '.jpg'
    return ""
def cal_bg_url(bid, sid):
    # resolve the background filename via the sayobot beatmap-info API
    try:
        filename = ""
        search_file_url = f"https://api.sayobot.cn/v2/beatmapinfo?K={sid}"
        r = interRequest.interReq()
        info = r.get(search_file_url, verify=False)
        map_info = json.loads(info.text)
        for entry in map_info["data"]["bid_data"]:
            if str(entry["bid"]) == bid:
                filename = entry["bg"]
                break
        else:
            # no matching bid found in the beatmap set
            return 0
        bgUrl = f"https://dl.sayobot.cn/beatmaps/files/{sid}/{filename}"
        logging.info(f"bid:{bid}, sid:{sid}, file url:{bgUrl}")
        return bgUrl
    except Exception:
        logging.exception(f"down bid:{bid} fail")
        return 0
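# Assumed shape of the sayobot beatmapinfo response parsed above, inferred from
# the field accesses in cal_bg_url; the concrete values are placeholders:
#   {"data": {"bid_data": [{"bid": 1028215, "bg": "BG.jpg", ...}, ...]}}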
def down_bg(bid='1028215'):
    iq = interRequest.interReq()
    return iq.down_image(iname=bid, url=bloodcat_bg % bid, path=bg_path)
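if __name__ == '__main__':
    # Minimal usage sketch added for illustration only; the bid/sid values are
    # the defaults used above, and bg_path must point at a writable directory.
    logging.basicConfig(level=logging.INFO)
    print(cal_bg_url('1028215', '480609'))  # resolve the background URL only
    print(down_bg(bid='1028215'))           # resolve and download the background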