def jobProducer():
    logger = tool.getLogger()
    logger.debug("start video Producer")
    channel = tool.channelConf
    db = tool.getDB()
    workList = []
    for i in channel.data:
        per = channel[i]
        plat = per["platform"]
        bean = VideoFactory.getBean(plat)
        workList += bean.GetVideos(per, tool.settingConf["Platform"][plat])
    try:
        cnt = 0
        for i in workList:
            video_id = i.channelParam["id"]
            db_res = db.execute("select count(vid) from data where vid=?;",
                                (video_id, )).fetchone()[0]
            if int(db_res) != 0:
                # already recorded in the database, skip
                continue
            if unique.checkAndInsert(video_id):
                cnt += 1
                buffer.put(i, block=True)
        logger.info(
            f"new: {cnt}, sum: {unique.size()}, rest: {buffer.qsize()}")
    except Exception:
        logger.error("upload-P", exc_info=True)
    db.close()
def run(): # {"title": tmp_data["title"], "id": video_id, "av": _["av"]} work = GetPlayList.get_work_list() logger = tool.getLogger() account = tool.QRLogin() for i in work: logger.debug(json.dumps(i)) logger.info("start: vid[{}], 1080P[{}], Multipart[{}]".format( i["id"], i["hd"], i["multipart"])) vmer = getVideo.VideoManager(i["id"], i["hd"]) data = vmer.getVideo() if data[0]: if i["multipart"]: success, res, upos_uri = Upload.uploadWithOldBvid( account.getCookies(), i, data[1]) else: success, res, upos_uri = Upload.uploadWithNewBvid( account.getCookies(), i, data[1]) if not success: continue upos_uri = upos_uri.split(".")[0] res = json.loads(res) if res["code"] != 0: logger.error(res["message"]) continue with tool.getDB() as db: db.execute( "insert into data(vid,bvid,title,filename) values(?,?,?,?);", (i["id"], res["data"]["bvid"], i["title"], upos_uri)) db.commit() logger.info(f"finished, bvid[{res['data']['bvid']}]") vmer.deleteFile() else: logger.error("download failed")
def run():
    user = tool.AccountManager("Anki")
    cookie = user.getCookies()
    db = tool.getDB()
    rs = db.execute(
        "select distinct bvid from data where zht=false or zhs=false or en=false"
    ).fetchall()
    api = "https://api.bilibili.com/x/player/pagelist?bvid="
    s = tool.Session()
    s.cookies.update(cookie)
    for bvid in rs:
        bvid = bvid[0]
        pages = s.get(api + bvid).json()
        if pages["code"] != 0:
            # db.execute("delete from data where bvid=?", (bvid, ))
            continue
        pages = pages["data"]
        vid = db.execute("select vid,zht,zhs,en from data where bvid=?",
                         (bvid, )).fetchone()
        if len(pages) == 1:  # single-part video
            send((bvid, pages[0]["cid"]) + vid, cookie)
            continue
        for i in pages:  # multi-part video: look up each part by its title
            title = i["part"]
            vid = db.execute("select vid,zht,zhs,en from data where title=?",
                             (title, )).fetchone()
            if vid is None or len(vid) == 0:
                continue
            send((bvid, i["cid"]) + vid, cookie)
    db.close()
    Subtitle.fix_sub(cookie=cookie)
def __consume():
    account = tool.AccountManager("Anki")
    logger = tool.getLogger()
    logger.debug("start video Consumer")
    while True:
        i = buffer.get(block=True)
        logger.debug(json.dumps(i))
        logger.info("start: vid[{}], 1080P[{}], Multipart[{}]".format(
            i["id"], i["hd"], i["multipart"]))
        vmer = getVideo.VideoManager(i["id"], i["hd"])
        data = vmer.getVideo()
        if data[0]:
            if i["multipart"]:
                success, res, upos_uri = Upload.uploadWithOldBvid(
                    account.getCookies(), i, data[1])
            else:
                success, res, upos_uri = Upload.uploadWithNewBvid(
                    account.getCookies(), i, data[1])
            if not success:
                continue
            upos_uri = upos_uri.split(".")[0]
            res = json.loads(res)
            if res["code"] != 0:
                logger.error(res["message"])
                continue
            with tool.getDB() as db:
                db.execute(
                    "insert into data(vid,bvid,title,filename) values(?,?,?,?);",
                    (i["id"], res["data"]["bvid"], i["title"], upos_uri))
                db.commit()
            logger.info(f"finished, bvid[{res['data']['bvid']}]")
            vmer.deleteFile()
        else:
            logger.error("download failed")
def send(i, cookie):
    # i = (bvid, cid, vid, zht, zhs, en) -- assembled by the callers in run() above
    db = tool.getDB()
    if i[3] == 0 and Subtitle.send_subtitle(bvid=i[0], cid=i[1], vid=i[2],
                                            cookie=cookie, lan="zh-CN"):
        db.execute("update data set zht = 1 where cid=(?);", (i[1], ))
        db.commit()
        time.sleep(10)
    if i[4] == 0 and Subtitle.send_subtitle(bvid=i[0], cid=i[1], vid=i[2],
                                            cookie=cookie, lan="zh-TW"):
        db.execute("update data set zhs = 1 where cid=(?);", (i[1], ))
        db.commit()
        time.sleep(10)
    if i[5] == 0 and Subtitle.send_subtitle(bvid=i[0], cid=i[1], vid=i[2],
                                            cookie=cookie, lan="en-US"):
        db.execute("update data set en = 1 where cid=(?);", (i[1], ))
        db.commit()
        time.sleep(10)
    db.close()
def __consume():
    account = tool.QRLogin()
    logger = tool.getLogger()
    logger.debug("start video Consumer")
    proxy = tool.settingConf["Proxy"]
    while True:
        i = buffer.get(block=True)
        channelInfo = i.channelParam
        logger.debug(json.dumps(channelInfo))
        logger.info("start: vid[{}], Multipart[{}]".format(
            channelInfo["id"], channelInfo["multipart"]))
        # vmer = getVideo.VideoManager(i["id"], i["hd"])
        data = i.download(proxy)
        if data:
            fpath = i.path()
            if len(fpath) <= 0:
                continue
            if channelInfo["multipart"]:
                success, res, upos_uri = Upload.uploadWithOldBvid(
                    account.getCookies(), channelInfo, fpath)
            else:
                success, res, upos_uri = Upload.uploadWithNewBvid(
                    account.getCookies(), channelInfo, fpath)
            if not success:
                continue
            upos_uri = upos_uri.split(".")[0]
            res = json.loads(res)
            if res["code"] != 0:
                logger.error(res["message"])
                continue
            with tool.getDB() as db:
                db.execute(
                    "insert into data(vid,bvid,title,filename) values(?,?,?,?);",
                    (channelInfo["id"], res["data"]["bvid"],
                     channelInfo["title"], upos_uri))
                db.commit()
            logger.info(f"finished, bvid[{res['data']['bvid']}]")
            i.deleteFile()
        else:
            logger.error("download failed")
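# --- Hypothetical wiring sketch (not part of the project source) ---
# jobProducer() fills the module-level `buffer` queue and this __consume()
# drains it, so the two are meant to run as a producer/consumer pair.  A
# minimal way to wire them up, assuming `buffer` is a queue.Queue shared by
# both functions; the function name and the daemon-thread arrangement are
# assumptions, not the project's actual scheduler.
def _start_pipeline_sketch():
    import threading
    threading.Thread(target=__consume, daemon=True).start()  # blocks on buffer.get()
    jobProducer()  # scan the configured channels once and enqueue new videos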
def getYTB(settings: dict) -> list:
    logger = tool.getLogger()
    _return = []
    if not settings.get("enable", False):
        return _return
    api_key = tool.settingConf["GoogleToken"]
    db = tool.getDB()
    s = tool.Session()
    params = {
        "part": "snippet",
        settings["type"]: settings["param"],
        "key": api_key[randint(0, len(api_key) - 1)],
        "maxResults": settings.get("countPerPage", 10),
        "order": "date",
        "type": "video",
        "pageToken": None
    }
    pages = int(settings.get("pages", 1))
    url = "https://www.googleapis.com/youtube/v3/search"
    # if settings["type"] == "q":
    #     url = "https://www.googleapis.com/youtube/v3/search"
    # elif settings["type"] == "playlistId":
    #     url = "https://www.googleapis.com/youtube/v3/playlistItems"
    for _ in range(pages):
        _res: dict = s.get(url, params=params, useProxy=True).json()
        if _res.get("error") is not None:
            _res = _res["error"]
            logger.error(f"code[{_res['code']}],message[{_res['message']}]")
            logger.error("failed to fetch videos, please check setting.yaml")
            break
        for __ in _res["items"]:
            tmp_data = __["snippet"]
            id_tmp = tmp_data.get("resourceId") or __.get("id")
            video_id = id_tmp["videoId"]
            db_res = db.execute("select count(vid) from data where vid=?;",
                                (video_id, )).fetchone()[0]
            if int(db_res) != 0:
                # already recorded in the database, skip
                continue
            tmpTitle = tmp_data["title"]
            if settings.get("titleTranslate", False):
                tmpTitle = tool.translateG(tmpTitle)
            logger.debug(tmpTitle)
            if not filters(settings, tmpTitle):
                logger.debug(f"{tmpTitle} did not pass the filter")
                continue
            tmpRs = settings.copy()
            tmpRs.update({
                "title": tmpTitle[0:min(80, len(tmpTitle))],  # Bilibili caps the title length
                "id": video_id,
                "ptitle": str(settings.get("title", "")).format(
                    title=tmpTitle,
                    ctitle=tmp_data["channelTitle"],
                    ptime=tmp_data["publishedAt"],
                    surl="https://www.youtube.com/watch?v=" + video_id),
                "desc": str(settings.get("desc", "")).format(
                    title=tmpTitle,
                    ctitle=tmp_data["channelTitle"],
                    ptime=tmp_data["publishedAt"],
                    surl="https://www.youtube.com/watch?v=" + video_id)
            })
            # tmpRs["tags"] = tmpRs.get("tags", "").split(",")
            ptitle = tmpRs.get("ptitle", "")
            tmpRs["ptitle"] = ptitle[0:min(80, len(ptitle))]
            desc = tmpRs.get("desc", "")
            tmpRs["desc"] = desc[0:min(250, len(desc))]
            _return.append(tmpRs)
        params["pageToken"] = _res.get("nextPageToken", None)
        if params["pageToken"] is None:
            break
    db.close()
    s.close()
    return _return
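# --- Hypothetical sketch of the title filter (not the project source) ---
# getYTB() calls filters(settings, tmpTitle) to decide whether a video should
# be mirrored, but that helper is defined elsewhere and is not shown here.
# A rough sketch of what such a keyword filter could look like; the
# "include"/"exclude" keys are assumptions, not the project's real config schema.
def _filters_sketch(settings: dict, title: str) -> bool:
    lowered = title.lower()
    # reject the video if any excluded keyword appears in the title
    for word in settings.get("exclude", []):
        if word.lower() in lowered:
            return False
    # if an include list is configured, require at least one match
    include = settings.get("include", [])
    if include:
        return any(word.lower() in lowered for word in include)
    return True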