Esempio n. 1
0
def jobProducer():
    """Collect new videos from every configured channel and enqueue them.

    Builds the work list from all enabled channels, skips videos already
    recorded in the database or already queued this session, and puts the
    rest on the shared ``buffer`` queue for the consumer thread.
    """
    logger = tool.getLogger()
    logger.debug("start video Producer")
    channel = tool.channelConf
    db = tool.getDB()
    try:
        # Building the work list can raise (network, config); it must be
        # inside try/finally so the DB handle is always released.
        workList = []
        for name in channel.data:
            per = channel[name]
            plat = per["platform"]
            bean = VideoFactory.getBean(plat)
            workList += bean.GetVideos(per, tool.settingConf["Platform"][plat])
        cnt = 0
        for work in workList:
            video_id = work.channelParam["id"]
            db_res = db.execute("select count(vid) from data where vid=?;",
                                (video_id, )).fetchone()[0]
            # Already uploaded in a previous run -> skip.
            if int(db_res) != 0:
                continue
            # checkAndInsert is the in-memory dedup guard for this session.
            if unique.checkAndInsert(video_id):
                cnt += 1
                buffer.put(work, block=True)
        logger.info(
            f"new: {cnt}, sum: {unique.size()}, rest: {buffer.qsize()}")
    except Exception:
        logger.error("upload-P", exc_info=True)
    finally:
        # Close the DB handle even when fetching the work list fails.
        db.close()
Esempio n. 2
0
def jobProducer():
    """Fetch the playlist work list and enqueue items not seen before."""
    logger = tool.getLogger()
    logger.debug("start video Producer")
    try:
        workList = GetPlayList.get_work_list()
        cnt = 0
        for work in workList:
            # checkAndInsert returns True only for ids not queued before.
            if unique.checkAndInsert(work["id"]):
                cnt += 1
                buffer.put(work, block=True)
        logger.info(
            f"new: {cnt}, sum: {unique.size()}, rest: {buffer.qsize()}")
    except Exception:
        # No placeholders -> plain string, not an f-string; logger is
        # already in scope, no need to re-fetch it.
        logger.error("upload-P", exc_info=True)
Esempio n. 3
0
def run():
    """Process every pending work item: download, then re-upload to bilibili."""
    # Work items look like: {"title": ..., "id": video_id, "av": ...}
    work = GetPlayList.get_work_list()
    logger = tool.getLogger()
    account = tool.QRLogin()
    for task in work:
        logger.debug(json.dumps(task))
        logger.info("start: vid[{}], 1080P[{}], Multipart[{}]".format(
            task["id"], task["hd"], task["multipart"]))
        manager = getVideo.VideoManager(task["id"], task["hd"])
        ok, video_path = manager.getVideo()
        if not ok:
            logger.error("download failed")
            continue
        # Multipart items extend an existing archive; others create a new one.
        uploader = (Upload.uploadWithOldBvid
                    if task["multipart"] else Upload.uploadWithNewBvid)
        success, res, upos_uri = uploader(
            account.getCookies(), task, video_path)
        if not success:
            continue
        upos_uri = upos_uri.split(".")[0]
        res = json.loads(res)
        if res["code"] != 0:
            logger.error(res["message"])
            continue
        # Record vid -> bvid so this item is skipped on the next pass.
        with tool.getDB() as db:
            db.execute(
                "insert into data(vid,bvid,title,filename) values(?,?,?,?);",
                (task["id"], res["data"]["bvid"], task["title"], upos_uri))
            db.commit()
        logger.info(f"finished, bvid[{res['data']['bvid']}]")
        manager.deleteFile()
Esempio n. 4
0
File: main.py Progetto: wuye999/Y2B
def exits(signalNum, frame):
    """Signal handler: persist settings, stop the scheduler, exit cleanly."""
    tool.getLogger().info("等待任务正常结束,请勿强制关闭,避免出现数据丢失!!!")
    tool.settingConf.save()
    job.shutdown()
    sys.exit(0)
Esempio n. 5
0
def __consume():
    """Consumer loop: pull queued videos, download them, upload to bilibili.

    Runs forever; blocks on the shared ``buffer`` queue for the next work
    item (a dict with at least "id", "hd", "multipart" and "title").
    """
    account = tool.AccountManager("Anki")
    logger = tool.getLogger()
    logger.debug("start video Consumer")
    while True:
        # Block until the producer enqueues the next item.
        i = buffer.get(block=True)
        logger.debug(json.dumps(i))
        logger.info("start: vid[{}], 1080P[{}], Multipart[{}]".format(
            i["id"], i["hd"], i["multipart"]))
        vmer = getVideo.VideoManager(i["id"], i["hd"])
        # getVideo() returns (success, local_file_path).
        data = vmer.getVideo()
        if data[0]:
            # Multipart items are appended to an existing archive (bvid);
            # otherwise a brand-new archive is created.
            if i["multipart"]:
                success, res, upos_uri = Upload.uploadWithOldBvid(
                    account.getCookies(), i, data[1])
            else:
                success, res, upos_uri = Upload.uploadWithNewBvid(
                    account.getCookies(), i, data[1])
            if not success:
                continue
            # Keep only the bare filename part of the upos uri.
            upos_uri = upos_uri.split(".")[0]
            res = json.loads(res)
            if res["code"] != 0:
                logger.error(res["message"])
                continue
            # Record vid -> bvid so the producer skips this video next run.
            with tool.getDB() as db:
                db.execute(
                    "insert into data(vid,bvid,title,filename) values(?,?,?,?);",
                    (i["id"], res["data"]["bvid"], i["title"], upos_uri))
                db.commit()
            logger.info(f"finished, bvid[{res['data']['bvid']}]")
            vmer.deleteFile()
        else:
            logger.error("download failed")
Esempio n. 6
0
def send_subtitle(bvid, lan, cid, cookie, fix=False, vid=None, add=None):
    """Save a subtitle draft for a bilibili video part.

    Args:
        bvid: target video bvid.
        lan: subtitle language code.
        cid: part id (oid) the subtitle belongs to.
        cookie: login cookies; must contain "bili_jct" (CSRF token).
        fix: when True submit ``add`` directly instead of fetching.
        vid: source video id passed to get_sub() when ``fix`` is False.
        add: pre-built subtitle payload used when ``fix`` is True.

    Returns:
        True on success, False when there is no subtitle or the API
        rejects the draft.
    """
    logger = tool.getLogger()
    _api = "https://api.bilibili.com/x/v2/dm/subtitle/draft/save"
    csrf = cookie["bili_jct"]
    if not fix:
        sou = get_sub(vid, lan)
    else:
        sou = add
    if sou is None:
        # Nothing to submit. (The original created a Session before this
        # early return and leaked it.)
        return False
    send_data = {
        "type": 1,
        "oid": cid,
        "bvid": bvid,
        "lan": lan,
        "data": sou,
        "submit": "true",
        "sign": "false",
        "csrf": csrf
    }
    s = tool.Session()
    s.cookies.update(cookie)
    try:
        _res = s.post(url=_api, data=send_data).json()
    finally:
        # Always release the HTTP session.
        s.close()
    if _res["code"] != 0:
        logger.error(str(bvid) + json.dumps(_res))
        return False
    logger.info(
        f"subtitle success BV[{bvid}], Lan[{lan}], fix[{fix}], cid[{cid}]")
    return True
Esempio n. 7
0
 def threadUpload(url, param, part, s):
     """Upload one chunk in a worker thread, then release the rate limiter.

     NOTE(review): the original ignored the ``url`` parameter and read the
     enclosing scope's ``upload_url``; callers pass ``upload_url`` as
     ``url``, so using the parameter is equivalent and self-contained.
     """
     logger = tool.getLogger()
     try:
         res = s.put(url=url,
                     params=param,
                     data=part,
                     wantStatusCode=200)
         logger.info(f"{param['partNumber']}/{param['chunks']}:{res.text}")
     finally:
         # Release even on failure so the uploader's semaphore drain
         # cannot deadlock waiting for a slot that never comes back.
         limit.release()
Esempio n. 8
0
def uploadWithOldBvid(cookie: dict, uploadInfo: dict, videoPath: str):
    """Upload a file and append it as a new part of an existing archive.

    Edits the archive identified by ``uploadInfo["bvid"]``: keeps every
    existing part without a reject reason and adds the freshly uploaded
    file as the last part.

    Returns:
        (True, raw_response_text, upos_uri) after the edit request, or
        (False, "", "") when the file upload itself fails.
    """
    logger = tool.getLogger()
    # Parallel chunk upload defaults to on for the multipart path.
    enableParallel = uploadInfo.get("enableParallel", True)
    success, upos_uri = uploadFile(cookie,
                                   videoPath,
                                   enableParallel=enableParallel)
    if not success:
        return False, "", ""
    s = tool.Session()
    s.cookies.update(cookie)

    url = f"https://member.bilibili.com/x/vu/web/edit?csrf={cookie['bili_jct']}"

    _rs = s.get(
        f"https://member.bilibili.com/x/web/archive/view?bvid={uploadInfo['bvid']}"
    ).json()["data"]
    videos = []
    for i in _rs["videos"]:
        # Drop parts flagged by review (duplicates, decode errors,
        # policy violations, ...).
        if len(i['reject_reason']) > 0:
            logger.debug("{}-{}:{}".format(i["aid"], i["cid"],
                                           i["reject_reason"]))
            continue
        videos.append({"filename": i["filename"], "title": i["title"]})
    videos.append({
        "filename":
        upos_uri.split(".")[0],
        "title":
        uploadInfo["title"][0:min(79, len(uploadInfo["title"]))],
        "desc":
        uploadInfo["id"]
    })
    send_data = {
        "copyright": 2,
        "videos": videos,
        "source": _rs["archive"]["source"],
        "tid": _rs["archive"]["tid"],
        "cover": _rs["archive"]["cover"].split(":")[-1],
        "title": _rs["archive"]["title"],
        "tag": _rs["archive"]["tag"],
        "desc_format_id": 0,
        "desc": _rs["archive"]["desc"],
        "dynamic": _rs["archive"]["dynamic"],
        "subtitle": {
            "open": 0,
            "lan": ""
        },
        "bvid": uploadInfo["bvid"],
        "handle_staff": False,
    }
    logger.debug(json.dumps(send_data))
    res = s.post(url=url, json=send_data).text
    logger.debug(res)
    # Close the session, consistent with uploadWithNewBvid (it was leaked).
    s.close()
    return True, res, upos_uri
Esempio n. 9
0
def uploadWithNewBvid(cookie: dict, uploadInfo: dict, videoPath: str):
    """Upload a video file and publish it as a brand-new bilibili archive.

    Returns:
        (True, raw_response_text, upos_uri) after the add request, or
        (False, "", "") when the file upload itself fails.
    """
    logger = tool.getLogger()
    # Parallel chunk upload is opt-in here (default False), unlike the
    # old-bvid path which defaults to True.
    enableParallel = uploadInfo.get("enableParallel", False)
    success, upos_uri = uploadFile(
        cookie, videoPath, enableParallel=enableParallel)
    if not success:
        return False, "", ""
    s = tool.Session()
    s.cookies.update(cookie)
    csrf = cookie["bili_jct"]

    def cover(csrf, uploadInfo):
        # Fetch the YouTube thumbnail and re-upload it as the cover image.
        vid = uploadInfo["id"]
        __url = "https://member.bilibili.com/x/vu/web/cover/up"
        __imgURL = f"https://i1.ytimg.com/vi/{vid}/maxresdefault.jpg"
        __imgURL2 = f"https://i1.ytimg.com/vi/{vid}/hqdefault.jpg"
        __rs = s.get(__imgURL, useProxy=True, wantStatusCode=200)
        if __rs is None:
            # Fall back to the lower-resolution thumbnail.
            __rs = s.get(__imgURL2, useProxy=True, wantStatusCode=200)
        # NOTE(review): if both fetches return None this raises
        # AttributeError on __rs.content — confirm callers tolerate that.
        __send = {"cover": "data:image/jpeg;base64," +
                  base64.b64encode(__rs.content).decode(),
                  "csrf": csrf
                  }
        __res = s.post(url=__url, data=__send).json()

        # Return a scheme-relative URL, stripping http:/https:.
        return __res["data"]["url"].replace("http:", "").replace("https:", "")

    url = "https://member.bilibili.com/x/vu/web/add?csrf=" + csrf
    # s.headers.pop("X-Upos-Auth")
    _data = s.get("https://member.bilibili.com/x/geetest/pre/add").text
    logger.debug(_data)
    send_data = {"copyright": 2,
                 "videos": [{"filename": upos_uri.split(".")[0],
                             "title": uploadInfo["title"],
                             "desc": ""}],
                 "source": "https://www.youtube.com/watch?v=" + uploadInfo["id"],
                 "tid": int(uploadInfo["tid"]),
                 "cover": cover(csrf, uploadInfo),
                 "title": uploadInfo["ptitle"],
                 "tag": ','.join(uploadInfo["tags"]),
                 "desc_format_id": 0,
                 "desc": uploadInfo["desc"],
                 "dynamic": "#" + "##".join(uploadInfo["tags"]) + "#",
                 "subtitle": {
                        "open": 0,
                        "lan": ""}
                 }
    logger.debug(json.dumps(send_data))
    # s.headers.update({"Content-Type": "application/json;charset=UTF-8"})
    res = s.post(url=url, json=send_data).text
    logger.debug(res)
    s.close()
    return True, res, upos_uri
Esempio n. 10
0
def __consume():
    """Consumer loop for Video objects produced by jobProducer.

    Blocks forever on the shared ``buffer`` queue; each item is a Video
    whose ``channelParam`` dict carries the upload configuration.
    """
    account = tool.QRLogin()
    logger = tool.getLogger()
    logger.debug("start video Consumer")
    proxy = tool.settingConf["Proxy"]
    while True:
        # Block until the producer enqueues the next Video.
        i = buffer.get(block=True)
        channelInfo = i.channelParam
        logger.debug(json.dumps(channelInfo))
        logger.info("start: vid[{}], Multipart[{}]".format(
            channelInfo["id"], channelInfo["multipart"]))
        # vmer = getVideo.VideoManager(i["id"], i["hd"])
        # The Video object downloads itself (optionally through a proxy).
        data = i.download(proxy)

        if data:
            fpath = i.path()
            # Download reported success but produced no file path -> skip.
            if len(fpath) <= 0:
                continue
            # Multipart items extend an existing archive; others create one.
            if channelInfo["multipart"]:
                success, res, upos_uri = Upload.uploadWithOldBvid(
                    account.getCookies(), channelInfo, fpath)
            else:
                success, res, upos_uri = Upload.uploadWithNewBvid(
                    account.getCookies(), channelInfo, fpath)
            if not success:
                continue
            # Keep only the bare filename part of the upos uri.
            upos_uri = upos_uri.split(".")[0]
            res = json.loads(res)
            if res["code"] != 0:
                logger.error(res["message"])
                continue
            # Record vid -> bvid so the producer skips this video next run.
            with tool.getDB() as db:
                db.execute(
                    "insert into data(vid,bvid,title,filename) values(?,?,?,?);",
                    (channelInfo["id"], res["data"]["bvid"],
                     channelInfo["title"], upos_uri))
                db.commit()
            logger.info(f"finished, bvid[{res['data']['bvid']}]")
            i.deleteFile()
        else:
            logger.error("download failed")
Esempio n. 11
0
def fix_sub(cookie):
    """Repair rejected subtitles and resubmit them.

    Lists subtitles in the "rejected" state (status=3), rewrites each
    rejected word (pinyin for Chinese languages, '#'-joined letters
    otherwise), resubmits via send_subtitle, then deletes the old draft.

    Args:
        cookie: login cookies; must contain "bili_jct" (CSRF token).
    """
    logger = tool.getLogger()
    csrf = cookie["bili_jct"]
    s = tool.Session()
    s.cookies.update(cookie)
    wait_api = "https://api.bilibili.com/x/v2/dm/subtitle/search/author/list?status=3&page=1&size=100"
    # Fix: the original iterated a name (`res`) that was rebound to the
    # delete-response inside the loop; distinct names remove that hazard.
    subtitles = s.get(wait_api).json()["data"]["subtitles"]
    if subtitles is None:
        return
    for entry in subtitles:
        tmp_url = f"https://api.bilibili.com/x/v2/dm/subtitle/show?oid={entry['oid']}&subtitle_id={entry['id']}"
        data = s.get(tmp_url).json()["data"]
        # Rejected words arrive as "reason:word1,word2,...".
        reject_comment = data["reject_comment"].split(':')[-1].split(',')
        subtitle_url = data["subtitle_url"]
        sub = s.get(subtitle_url).text
        for word in reject_comment:
            if "zh" in entry["lan"]:
                # Replace blocked Chinese words with their pinyin.
                sub = sub.replace(
                    word,
                    "".join(lazy_pinyin(word.replace('#', ""), style=Style.TONE)))
            else:
                # Break up blocked words with '#' between characters.
                sub = sub.replace(word, "#".join(word))
        if send_subtitle(entry["bvid"],
                         lan=entry["lan"],
                         cid=entry["oid"],
                         cookie=cookie,
                         fix=True,
                         add=sub):
            del_res = s.post("https://api.bilibili.com/x/v2/dm/subtitle/del",
                             data={
                                 "oid": entry["oid"],
                                 "csrf": csrf,
                                 "subtitle_id": entry["id"]
                             }).json()
            if del_res["code"] != 0:
                logger.error(del_res["message"])
            else:
                logger.info(f"fix done:{entry['oid']}")
        # Throttle to stay well under the API rate limit.
        time.sleep(10)
Esempio n. 12
0
    def GetVideos(channel: dict, settings: dict = None) -> List[Video]:
        """Fetch one channel's newest videos from the YouTube Data API.

        Args:
            channel: per-channel config ("type", "param", "enable",
                "title", "desc", ...).
            settings: platform settings containing "GoogleToken"
                (a list of API keys); ``None`` is treated as empty.

        Returns:
            A list of Video objects; empty when disabled or on API error.
        """
        def getKey(item):
            # ISO-8601 timestamps sort correctly as plain strings.
            return item["snippet"]["publishedAt"]

        logger = tool.getLogger()
        _return = []
        # Fix: avoid the shared mutable default argument `settings={}`.
        if settings is None:
            settings = {}
        if not channel.get("enable", False):
            return _return
        api_key = settings["GoogleToken"]
        s = tool.Session()
        params = {
            "part": "snippet",
            channel["type"]: channel["param"],
            # Pick a random API key to spread quota usage.
            "key": api_key[randint(0, len(api_key) - 1)],
            "maxResults": 50,
            "order": "date",
            "pageToken": None
        }
        if channel["type"] == "q":
            url = "https://www.googleapis.com/youtube/v3/search"
        elif channel["type"] == "playlistId":
            url = "https://www.googleapis.com/youtube/v3/playlistItems"
        else:
            # Fix: any other type previously crashed with NameError on
            # `url`; fail loudly but gracefully instead.
            logger.error(f"unsupported channel type: {channel['type']}")
            s.close()
            return []
        _res: dict = s.get(url, params=params, useProxy=True).json()
        if _res.get("error") is not None:
            _res = _res["error"]
            logger.error(f"code[{_res['code']}],message[{_res['message']}]")
            logger.error("获取视频失败,请检查配置文件setting.yaml,或可能为配额已用完")
            # Fix: close the session on the error path too.
            s.close()
            return []
        # Newest first, then keep only the configured page size.
        _res["items"].sort(key=getKey, reverse=True)
        for __ in _res["items"][0:channel.get("countPerPage", 10)]:
            tmp_data = __["snippet"]
            # playlistItems nests the video id under "resourceId";
            # search results expose it under "id".
            id_tmp = tmp_data.get("resourceId") or __.get("id")
            video_id = id_tmp["videoId"]
            tmpTitle = tmp_data["title"]
            stitle = tmp_data["title"]
            tmpDesc = tmp_data["description"]
            if channel.get("titleTranslate", False):
                tmpTitle = tool.translateG(tmpTitle)
            logger.debug(tmpTitle)
            tmpRs = channel.copy()
            tmpRs.update({
                "title":
                tmpTitle[0:min(80, len(tmpTitle))],  # site caps title length
                "id":
                video_id,
                "url":
                f"https://www.youtube.com/watch?v={video_id}",
                "ptitle":
                str(channel.get("title", "")).format(
                    title=tmpTitle,
                    ctitle=tmp_data["channelTitle"],
                    ptime=tmp_data["publishedAt"],
                    surl=f"https://www.youtube.com/watch?v={video_id}",
                    stitle=stitle,
                    sdesc=tmpDesc),
                "desc":
                str(channel.get("desc", "")).format(
                    title=tmpTitle,
                    ctitle=tmp_data["channelTitle"],
                    ptime=tmp_data["publishedAt"],
                    surl=f"https://www.youtube.com/watch?v={video_id}",
                    stitle=stitle,
                    sdesc=tmpDesc)
            })

            # Clamp published title/description to the site's limits.
            ptitle = tmpRs.get("ptitle", "")
            ptitle = ptitle[0:min(80, len(ptitle))]
            tmpRs["ptitle"] = ptitle

            desc = tmpRs.get("desc", "")
            desc = desc[0:min(250, len(desc))]
            tmpRs["desc"] = desc
            _return.append(Video(channelParam=tmpRs))
        s.close()
        return _return
Esempio n. 13
0
def getYTB(settings: dict) -> list:
    """Page through the YouTube search API and build upload work items.

    Skips videos already recorded in the local DB and titles rejected by
    ``filters``. Each returned dict is a copy of ``settings`` plus the
    computed "title", "id", "ptitle" and "desc" fields.

    Args:
        settings: one channel's config; must contain "type" and "param",
            may contain "pages", "countPerPage", "titleTranslate", ...

    Returns:
        List of work-item dicts (possibly empty).
    """
    logger = tool.getLogger()
    _return = []
    if not settings.get("enable", False):
        return _return
    api_key = tool.settingConf["GoogleToken"]
    db = tool.getDB()
    s = tool.Session()
    params = {
        "part": "snippet",
        settings["type"]: settings["param"],
        # Pick a random API key to spread quota usage.
        "key": api_key[randint(0,
                               len(api_key) - 1)],
        "maxResults": settings.get("countPerPage", 10),
        "order": "date",
        "type": "video",
        "pageToken": None
    }
    pages = int(settings.get("pages", 1))
    url = "https://www.googleapis.com/youtube/v3/search"
    # if settings["type"] == "q":
    #     url = "https://www.googleapis.com/youtube/v3/search"
    # elif settings["type"] == "playlistId":
    #     url = "https://www.googleapis.com/youtube/v3/playlistItems"
    for _ in range(pages):
        _res: dict = s.get(url, params=params, useProxy=True).json()
        if _res.get("error") is not None:
            _res = _res["error"]
            logger.error(f"code[{_res['code']}],message[{_res['message']}]")
            logger.error(f"获取视频失败,请检查配置文件setting.yaml")
            break
        for __ in _res["items"]:
            tmp_data = __["snippet"]
            # playlistItems nests the id under "resourceId"; search uses "id".
            id_tmp = tmp_data.get("resourceId") or __.get("id")
            video_id = id_tmp["videoId"]
            # Skip videos already uploaded (recorded in the local DB).
            db_res = db.execute("select count(vid) from data where vid=?;",
                                (video_id, )).fetchone()[0]
            if int(db_res) != 0:
                continue
            tmpTitle = tmp_data["title"]
            if settings.get("titleTranslate", False):
                tmpTitle = tool.translateG(tmpTitle)
            logger.debug(tmpTitle)
            if not filters(settings, tmpTitle):
                logger.debug(f"{tmpTitle} not fixed")
                continue
            tmpRs = settings.copy()
            tmpRs.update({
                "title":
                tmpTitle[0:min(80, len(tmpTitle))],  # site caps title length
                "id":
                video_id,
                "ptitle":
                str(settings.get("title", "")).format(
                    title=tmpTitle,
                    ctitle=tmp_data["channelTitle"],
                    ptime=tmp_data["publishedAt"],
                    surl="https://www.youtube.com/watch?v=" + video_id),
                "desc":
                str(settings.get("desc", "")).format(
                    title=tmpTitle,
                    ctitle=tmp_data["channelTitle"],
                    ptime=tmp_data["publishedAt"],
                    surl="https://www.youtube.com/watch?v=" + video_id)
            })

            # Clamp published title/description to the site's limits.
            ptitle = tmpRs.get("ptitle", "")
            ptitle = ptitle[0:min(80, len(ptitle))]
            tmpRs["ptitle"] = ptitle

            desc = tmpRs.get("desc", "")
            desc = desc[0:min(250, len(desc))]
            tmpRs["desc"] = desc

            _return.append(tmpRs)
        # Advance pagination; stop when there is no next page.
        params["pageToken"] = _res.get("nextPageToken", None)
        if _res.get("nextPageToken", None) is None:
            break
    db.close()
    s.close()
    return _return
Esempio n. 14
0
    def getVideo(self):
        """Resolve stream URLs for ``self.vid``, download and merge them.

        Tries youtube-dl first, then ``self.getVideoUrl()`` as a fallback.
        When ``self._hd`` is set, picks a separate HD video stream plus an
        audio stream and merges them with ffmpeg; otherwise falls back to
        a muxed format.

        Returns:
            (True, path_to_file) on success, (False, "") on any failure.
        """
        rs = {}
        proxy = tool.settingConf.get("Proxy")
        jsonrpc = tool.settingConf["Aria"]["jsonrpc"]
        ffmpegArgs = tool.settingConf["FFMPEG"]["args"]
        ffmpegPath = tool.settingConf["FFMPEG"]["path"]
        logger = tool.getLogger()
        opt = {"logger": logger}
        if proxy is not None:
            opt["proxy"] = proxy
        _tmpRs: dict = None
        try:
            ydl = youtube_dl.YoutubeDL(opt)
            _tmpRs = ydl.extract_info(
                f"https://www.youtube.com/watch?v={self.vid}", download=False)
        except Exception:
            # Fix: was a bare `except:` that also swallowed
            # KeyboardInterrupt/SystemExit.
            logger.info(f"[{self.vid}] youtube-dl failed, try another way...")
        try:
            if _tmpRs is None:
                _tmpRs = self.getVideoUrl()
        except Exception:
            # Fix: narrowed from bare `except:` for the same reason.
            logger.info(f"[{self.vid}] another way failed, noway..")
            logger.debug("", exc_info=True)
            return False, ""
        headers: dict = None
        for i in _tmpRs["formats"]:
            rs[i["format_id"]] = i
            headers = i["http_headers"]

        urlv = None
        urls = None
        # HD video-only itags, best first (presumably 1080p60/1080p/720p60
        # — TODO confirm against the itag table).
        for i in ["299", "137", "298"]:
            if self._hd and rs.get(i) is not None:
                if rs[i]["protocol"] == "http_dash_segments":
                    # Segmented (DASH) streams are not supported yet.
                    logger.error("分段视频,暂未支持")
                    return False, ""
                else:
                    urlv = rs[i]["url"]
                break

        # Audio-only itags, best first.
        for i in ["141", "140", "139"]:
            if urlv is not None and rs.get(i) is not None:
                if rs[i]["protocol"] == "http_dash_segments":
                    logger.error("分段视频,暂未支持")
                    return False, ""
                else:
                    urls = rs[i]["url"]
                break

        if urlv is None or urls is None:
            # Fall back to muxed formats (video+audio in one stream).
            urlv = None
            urls = None
            for i in ["22", "18"]:
                if rs.get(i) is not None:
                    if rs[i]["protocol"] == "http_dash_segments":
                        urlv = rs[i]['fragment_base_url']
                    else:
                        urlv = rs[i]["url"]
                    break

        logger.info(f"{self.vid}:v[{urlv is not None}],a[{urls is not None}]")
        logger.debug(f"v[{urlv}]")
        logger.debug(f"a[{urls}]")
        if urlv is None:
            return False, ""
        cmd = ffmpegPath + ' -i "{}" -i "{}" ' + ffmpegArgs + ' "{}"'

        # Download video (and audio, if separate) through aria2's JSON-RPC.
        self._dmer1 = tool.DownloadManager(
            urlv,
            proxy=proxy,
            jsonrpc=jsonrpc,
            files=self.vid + "_v",
            headers=headers)
        self._dmer1.download()
        if urls is not None:
            self._dmer2 = tool.DownloadManager(
                urls,
                proxy=proxy,
                jsonrpc=jsonrpc,
                files=self.vid + "_s",
                headers=headers)
            self._dmer2.download()
            if self._dmer2.waitForFinishing() != 1:
                return False, ""
        if self._dmer1.waitForFinishing() != 1:
            return False, ""

        # NOTE(review): assumes the constructor pre-initializes
        # self._dmer2 (e.g. to None) when no audio stream was fetched.
        if self._dmer2 is not None:
            _a = self._dmer2.telFileLocate()
            _v = self._dmer1.telFileLocate()
            self._o = self._dmer1.getDirs() + self.vid + "_merged.mp4"
            if os.path.exists(self._o):
                os.remove(self._o)
            nowCmd = cmd.format(_a, _v, self._o)
            logger.info("cmd: " + nowCmd)
            cmdRes = os.system(nowCmd)
            logger.info(f"ffmpeg result:{cmdRes}")
            if cmdRes != 0:
                return False, ""
            return True, self._o
        else:
            return True, self._dmer1.telFileLocate()
Esempio n. 15
0
def uploadFile(cookie: dict, videoPath: str, enableParallel=False) -> tuple:
    """Upload a local video file to bilibili's upos storage in chunks.

    Probes every available CDN line with 1 MiB of data, picks the fastest,
    requests a multipart upload id, then uploads the file chunk by chunk
    (optionally in parallel threads bounded by a semaphore).

    Args:
        cookie: login cookies.
        videoPath: path of the file to upload.
        enableParallel: upload chunks in worker threads when True.

    Returns:
        (True, upos_uri) on success, (False, "") on failure.
        (Annotation fixed: the original declared ``-> str`` but always
        returned a tuple.)
    """
    logger = tool.getLogger()
    logger.info(f"start {videoPath}")
    file_size = os.path.getsize(videoPath)
    s = tool.Session()
    s.cookies.update(cookie)
    s.headers.update({
        "Origin": "https://member.bilibili.com",
        "Referer": "https://member.bilibili.com/video/upload.html",
    })
    limit: threading.Semaphore = None
    limitCnt = 0
    upos: str = None
    upcdn: str = None
    cost: float = 99999999
    # Speed-probe every upload line; keep the fastest responding one.
    rs = s.get("https://member.bilibili.com/preupload?r=probe",
               wantStatusCode=200).json()
    testContent = b'\0' * 1048576
    for i in rs["lines"]:
        testURL = f"https:{i['probe_url']}"
        start = time.time()
        tRs = s.put(testURL, data=testContent)
        LCost = time.time() - start
        if tRs.status_code == 200 and "NGINX_OK" in tRs.text and LCost < cost:
            cost = LCost
            upos = i["os"]
            upcdn = i["query"]
    del testContent
    # Fix: the original tested `upcdn is None or upcdn is None`, checking
    # the same name twice and never `upos`.
    if upos is None or upcdn is None:
        return False, ""
    upcdn = re.findall("upcdn=([^&]+)", upcdn)[0]
    logger.debug(f"upos[{upos}],cdn[{upcdn}]")

    param = {
        "name": "{}.mp4".format(int(time.time())),
        "size": file_size,
        "r": upos,
        "profile": "ugcupos/bup",
        "ssl": "0",
        "version": "2.7.1",
        "build": "2070100",
        "upcdn": upcdn,
        "probe_version": "20200427",
    }
    url = "https://member.bilibili.com/preupload"
    _data = s.get(url=url, params=param).text
    logger.debug(_data)
    _data = json.loads(_data)
    upload_size = _data["chunk_size"]
    upos_uri = _data["upos_uri"].replace("upos:/", "").replace("/ugc/", "")
    biz_id = _data["biz_id"]
    endpoint = _data["endpoint"]
    auth = _data["auth"]
    if enableParallel:
        # The server tells us how many parallel upload slots we may use.
        limit = threading.Semaphore(_data["threads"])
        limitCnt = _data["threads"]
        logger.info("use parallel upload, count:{}".format(_data["threads"]))

    logger.info("preupload done")
    # Ask upos for a multipart upload id.
    data_url = f"https:{endpoint}/ugc/{upos_uri}?uploads&output=json"
    s.headers.update({"X-Upos-Auth": auth})
    _data = s.post(url=data_url).json()
    upload_id = _data["upload_id"]
    logger.debug(json.dumps(_data))
    logger.info("get upload id done")
    upload_url = f"https:{endpoint}/ugc/{upos_uri}"
    total_chunk = math.ceil(file_size / upload_size)
    index = 1
    now_size = 0
    restore = {"parts": []}

    # Read and upload the file chunk by chunk (fix: `with` guarantees the
    # file handle is closed even when an upload raises).
    with open(videoPath, "rb") as file:
        while now_size < file_size:
            new_end = min(now_size + upload_size, file_size - 1)
            part = file.read(upload_size)
            size = len(part)
            param = {
                "total": file_size,
                "partNumber": index,
                "uploadId": upload_id,
                "chunk": index - 1,
                "chunks": total_chunk,
                "size": size,
                "start": now_size,
                "end": new_end
            }
            now_size = new_end + 1
            index += 1

            def threadUpload(url, param, part, s):
                # Worker: upload one chunk, then free a semaphore slot.
                logger = tool.getLogger()
                res = s.put(url=upload_url, params=param,
                            data=part, wantStatusCode=200)
                logger.info(f"{param['partNumber']}/{param['chunks']}:{res.text}")
                limit.release()
            if enableParallel:
                limit.acquire()
                tool.Thread(target=threadUpload, args=(
                    upload_url, param.copy(), part, s)).start()
            else:
                res = s.put(url=upload_url, params=param,
                            data=part, wantStatusCode=200)
                logger.info(f"{index - 1}/{total_chunk}:{res.text}")
            # NOTE(review): `index` was already incremented, so this records
            # partNumber+1 for the chunk just sent ("eTag" is a
            # placeholder) — confirm the server only counts entries.
            restore["parts"].append({"partNumber": index, "eTag": "etag"})
    # Re-acquire every slot: effectively waits for all in-flight workers.
    for _ in range(limitCnt):
        if not limit.acquire(timeout=60 * 20):
            return False, ""
    del limit
    # Finalize the multipart upload.
    param = {
        'output': 'json',
        'name': time.ctime() + ".mp4",
        'profile': 'ugcupos/bup',
        'uploadId': upload_id,
        'biz_id': biz_id,
    }
    _data = s.post(upload_url, params=param, json=restore).text
    logger.info(f"upload file done: {upos_uri}")
    logger.debug(_data)
    return True, upos_uri
Esempio n. 16
0
File: main.py Progetto: wuye999/Y2B
def handleException(exp):
    """Error listener: log the currently-handled exception with traceback."""
    tool.getLogger().error("", exc_info=True)
Esempio n. 17
0
File: bean.py Progetto: wuye999/Y2B
 def __init__(self, youtube_dlParams: dict = {}, channelParam: dict = {}):
     """Bind the per-video parameters to this bean.

     Args:
         youtube_dlParams: extra options forwarded to youtube-dl.
         channelParam: channel config; must contain "id" (the video id).
     """
     # Defensive copies: the shared mutable defaults are never mutated,
     # and callers' dicts stay untouched.
     self.channelParam: dict = channelParam.copy()
     self.youtube_dlParams: dict = youtube_dlParams.copy()
     # Video id doubles as the unique key and the temp-file stem.
     self.__uniq = channelParam["id"]
     self.__name = f"./tmp/{self.__uniq}.%(ext)s"
     self.__log = tool.getLogger()
Esempio n. 18
0
File: main.py Progetto: wuye999/Y2B
import datetime
import main_upload
import main_sub
import signal
from utility import tool
import sys
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler import events

# Validate the channel configuration before scheduling anything.
tool.checkChannel()

# Blocking scheduler: start() will own the main thread.
job = BlockingScheduler(logger=tool.getLogger())


def exits(signalNum, frame):
    """Signal handler: save settings and shut the scheduler down cleanly.

    Matches the ``signal`` module handler signature (signum, frame).
    """
    # print(signalNum, frame)
    logger = tool.getLogger()
    # Message: "waiting for tasks to finish normally; do not force-quit,
    # to avoid data loss".
    logger.info("等待任务正常结束,请勿强制关闭,避免出现数据丢失!!!")
    tool.settingConf.save()
    job.shutdown()
    sys.exit(0)


def handleException(exp):
    """Scheduler error-event listener; logs the active exception.

    NOTE(review): ``exp`` is unused and ``exc_info=True`` only captures a
    traceback while an exception is being handled — confirm APScheduler
    invokes listeners inside the except context.
    """
    logger = tool.getLogger()
    logger.error("", exc_info=True)


# signal.signal(signal.SIGINT, exits)
# signal.signal(signal.SIGTERM, exits)