Example #1
async def nijisanji_main(DatabaseConn: VTBiliDatabase):
    vtlog.info("Fetching bili calendar data...")
    calendar_data = await fetch_bili_calendar()

    vtlog.info("Updating database...")
    upd_data = {"upcoming": calendar_data}
    try:
        await asyncio.wait_for(
            DatabaseConn.update_data("nijisanji_data", upd_data), 15.0)
    except asyncio.TimeoutError:
        await DatabaseConn.release()
        DatabaseConn.raise_error()
        vtlog.error("Failed to update upcoming data, timeout by 15s...")
Example #2
async def others_main(DatabaseConn: VTBiliDatabase, dataset_path: str):
    async with aiofiles.open(dataset_path, "r", encoding="utf-8") as fp:
        channels_dataset = ujson.loads(await fp.read())

    CHAN_BILI_UIDS = [chan["uid"] for chan in channels_dataset]
    vtlog.info("Fetching bili calendar data...")
    calendar_data = await fetch_bili_calendar(CHAN_BILI_UIDS)

    vtlog.info("Updating database...")
    upd_data = {"upcoming": calendar_data}
    try:
        await asyncio.wait_for(
            DatabaseConn.update_data("otherbili_data", upd_data), 15.0)
    except asyncio.TimeoutError:
        await DatabaseConn.release()
        DatabaseConn.raise_error()
        vtlog.error("Failed to update upcoming data, timeout by 15s...")
Example #3
async def update_channels_stats(DatabaseConn: VTBiliDatabase,
                                dataset_set: list):
    vtlog.info("Collecting channel UUIDs")
    channels_uids = []
    for chan in dataset_set:
        vtlog.debug(f"Opening: {chan}")
        async with aiofiles.open(chan, "r", encoding="utf-8") as fp:
            dds = ujson.loads(await fp.read())
        vtlog.debug(f"Total data: {len(dds)}")
        for nn, dd in enumerate(dds):
            channels_uids.append({"id": dd["id"], "uid": dd["uid"], "num": nn})

    vtlog.info("Processing...")
    final_data = await main_process_loop(channels_uids)
    vtlog.info("Updating DB data for Hololive...")
    try:
        await asyncio.wait_for(
            DatabaseConn.update_data("hololive_data",
                                     {"channels": final_data["hololive"]}),
            15.0)
    except asyncio.TimeoutError:
        await DatabaseConn.release()
        DatabaseConn.raise_error()
        vtlog.error(
            "Failed to update Hololive channels data, timed out after 15s...")
    vtlog.info("Updating DB data for Nijisanji...")
    try:
        await asyncio.wait_for(
            DatabaseConn.update_data("nijisanji_data",
                                     {"channels": final_data["nijisanji"]}),
            15.0)
    except asyncio.TimeoutError:
        await DatabaseConn.release()
        DatabaseConn.raise_error()
        vtlog.error(
            "Failed to update Nijisanji channels data, timed out after 15s...")
    vtlog.info("Updating DB data for Others...")
    try:
        await asyncio.wait_for(
            DatabaseConn.update_data("otherbili_data",
                                     {"channels": final_data["other"]}), 15.0)
    except asyncio.TimeoutError:
        await DatabaseConn.release()
        DatabaseConn.raise_error()
        vtlog.error("Failed to update Others channels data, timeout by 15s...")
Example #4
async def twitcasting_heartbeat(DatabaseConn: VTBiliDatabase,
                                twitcast_data: list):
    sessions = aiohttp.ClientSession(headers={"User-Agent": CHROME_UA})

    vtlog.info("Collecting IDs...")
    twitcast_id = [twit["id"] for twit in twitcast_data]

    vtlog.info("Creating tasks...")
    twitcast_tasks = [
        check_status(sessions, {
            "u": uid,
            "v": 999
        }, uid) for uid in twitcast_id
    ]

    tmri = lambda t: int(round(t))  # noqa: E731

    twitcasting_live_data = []
    current_time = datetime.now(tz=timezone.utc).timestamp()
    vtlog.info("Running all tasks...")
    for twit_task in asyncio.as_completed(twitcast_tasks):
        twit_res, channel = await twit_task
        vtlog.info(f"|-- Checking {channel} heartbeat")
        if not twit_res:
            vtlog.error(
                f"|--! Failed to fetch info for {channel}, skipping...")
            continue

        # The heartbeat response is a tab-separated record.
        tw_list = twit_res.split("\t")

        tw_sid = tw_list[0]  # stream/session ID; skip when missing
        if not tw_sid:
            continue
        if tw_sid == "7":  # a literal "7" also marks a stream to skip
            continue

        tw_time_passed = int(tw_list[6])  # seconds since the stream started
        tw_max_viewers = int(tw_list[5])  # peak viewers
        tw_current_viewers = int(tw_list[3])  # current viewers

        tw_title = unquote(tw_list[7]).strip()  # percent-encoded title

        if not tw_title:
            tw_title = f"Radio Live #{tw_sid}"

        tw_start_time = tmri(current_time - tw_time_passed)

        dataset = {
            "id": tw_sid,
            "title": tw_title,
            "startTime": tw_start_time,
            "channel": channel,
            "viewers": tw_current_viewers,
            "peakViewers": tw_max_viewers,
            "platform": "twitcasting",
        }
        twitcasting_live_data.append(dataset)

    if twitcasting_live_data:
        twitcasting_live_data.sort(key=lambda x: x["startTime"])

    vtlog.info("Updating database...")
    upd_data = {"live": twitcasting_live_data}
    try:
        await asyncio.wait_for(
            DatabaseConn.update_data("twitcasting_data", upd_data), 15.0)
    except asyncio.TimeoutError:
        await DatabaseConn.release()
        DatabaseConn.raise_error()
        vtlog.error(
            "Failed to update twitcasting live data, timed out after 15s...")
    await sessions.close()
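The check_status response above is parsed as a tab-separated record; the field layout below is inferred purely from the indices the code uses, so treat it as an assumption. A small self-contained parsing sketch:

from typing import NamedTuple, Optional
from urllib.parse import unquote

class TwitcastHeartbeat(NamedTuple):
    movie_id: str         # field 0 ("tw_sid" above)
    current_viewers: int  # field 3
    peak_viewers: int     # field 5
    seconds_elapsed: int  # field 6, time since the stream started
    title: str            # field 7, percent-encoded

def parse_heartbeat(raw: str) -> Optional[TwitcastHeartbeat]:
    # Hypothetical helper mirroring the skips in twitcasting_heartbeat.
    fields = raw.split("\t")
    if not fields[0] or fields[0] == "7":
        return None
    return TwitcastHeartbeat(
        movie_id=fields[0],
        current_viewers=int(fields[3]),
        peak_viewers=int(fields[5]),
        seconds_elapsed=int(fields[6]),
        title=unquote(fields[7]).strip() or f"Radio Live #{fields[0]}",
    )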
Example #5
async def holo_heartbeat(DatabaseConn: VTBiliDatabase, JetriConn: Jetri,
                         room_dataset: dict):
    session = aiohttp.ClientSession(headers={"User-Agent": CHROME_UA})

    vtlog.info("Fetching local youtube data...")
    holo_lives, holo_upcome = await JetriConn.fetch_lives()

    vtlog.info("Collecting live channels on youtube...")
    collect_live_channels = []
    for live_data in holo_lives:
        ch_id = live_data["channel"]
        if ch_id not in collect_live_channels:
            vtlog.debug(f"|--> Adding: {ch_id}")
            collect_live_channels.append(ch_id)
    for up_data in holo_upcome:
        ch_id = up_data["channel"]
        # Include upcoming streams whose scheduled start was 5+ minutes ago
        # but which are not yet marked live.
        current_time = datetime.now(tz=timezone.utc).timestamp() - 300
        if current_time >= up_data["startTime"]:
            if ch_id not in collect_live_channels:
                vtlog.debug(f"|--> Adding: {ch_id}")
                collect_live_channels.append(ch_id)

    holo_data: dict = room_dataset["holo"]
    vtlog.info("Fetching ignored room data from database...")
    is_db_fetched = False
    try:
        db_holo_ignored: dict = await asyncio.wait_for(
            DatabaseConn.fetch_data("hololive_ignored"), 15.0)
        is_db_fetched = True
    except asyncio.TimeoutError:
        await DatabaseConn.release()
        DatabaseConn.raise_error()
        vtlog.error(
            "Failed to fetch Hololive Ignored database, using blank data.")
        db_holo_ignored = {"data": []}
    holo_ignored: list = db_holo_ignored["data"]

    vtlog.info("Creating tasks to check room status...")
    room_to_fetch = [fetch_room(session, room) for room in holo_data.keys()]
    vtlog.info("Firing API requests!")
    final_results = []
    for froom in asyncio.as_completed(room_to_fetch):
        room_data, room_id = await froom
        vtlog.debug(f"|-- Checking heartbeat for: {room_id}")
        if not room_data:
            vtlog.warn(f"|--! Failed fetching Room ID: {room_id} skipping")
            continue
        if room_data["live_status"] != 1:
            continue
        thumbnail = room_data["user_cover"]
        viewers = room_data["online"]
        # live_time is Beijing time (UTC+8); parsing it as an aware datetime
        # means .timestamp() already returns the UTC epoch, so no extra
        # offset arithmetic is needed.
        start_time = int(
            round(
                datetime.strptime(room_data["live_time"] + " +0800",
                                  "%Y-%m-%d %H:%M:%S %z").timestamp()))
        gen_id = f"bili{room_id}_{start_time}"
        if gen_id in holo_ignored:
            vtlog.warn(f"Ignoring {room_id} since it's an Ignored restream...")
            continue
        if str(room_id) in holo_data:
            holo_map = holo_data[str(room_id)]
            if "id" in holo_map and holo_map["id"] in collect_live_channels:
                vtlog.warning(
                    f"Ignoring {room_id} since it's a YouTube restream...")
                if gen_id not in holo_ignored:
                    holo_ignored.append(gen_id)
                continue
        vtlog.info(f"Adding room_id: {room_id}")
        dd = {
            "id": gen_id,
            "room_id": int(room_id),
            "title": room_data["title"],
            "startTime": start_time,
            "channel": str(room_data["uid"]),
            "channel_name": holo_data[str(room_id)]["name"],
            "thumbnail": thumbnail,
            "viewers": viewers,
            "platform": "bilibili",
        }
        final_results.append(dd)

    if final_results:
        final_results.sort(key=lambda x: x["startTime"])

    vtlog.info("Updating database...")
    upd_data = {"live": final_results}
    upd_data2 = {"data": holo_ignored}
    try:
        await asyncio.wait_for(
            DatabaseConn.update_data("hololive_data", upd_data), 15.0)
    except asyncio.TimeoutError:
        await DatabaseConn.release()
        DatabaseConn.raise_error()
        vtlog.error(
            "Failed to update Hololive Heartbeat data, timed out after 15s...")
    if is_db_fetched:
        try:
            await asyncio.wait_for(
                DatabaseConn.update_data("hololive_ignored", upd_data2), 15.0)
        except asyncio.TimeoutError:
            await DatabaseConn.release()
            DatabaseConn.raise_error()
            vtlog.error(
                "Failed to update Hololive ignored database, timed out after 15s..."
            )
    await session.close()
Example #6
async def niji_heartbeat(DatabaseConn: VTBiliDatabase,
                         VTNijiConn: VTBiliDatabase, room_dataset: dict):
    session = aiohttp.ClientSession(headers={"User-Agent": CHROME_UA})

    vtlog.info("Fetching currently live/upcoming data from VTNiji Database...")
    collect_live_channels: list = []
    try:
        niji_yt_puredata = await asyncio.wait_for(
            VTNijiConn.fetch_data("nijitube_live"), 15.0)
        del niji_yt_puredata["_id"]
        for channel_id, channel_data in niji_yt_puredata.items():
            for vtu in channel_data:
                current_time = datetime.now(tz=timezone.utc).timestamp() - 300
                if vtu["status"] == "live":
                    if channel_id not in collect_live_channels:
                        collect_live_channels.append(channel_id)
                elif vtu["status"] == "upcoming":
                    if current_time >= vtu["startTime"]:
                        if channel_id not in collect_live_channels:
                            collect_live_channels.append(channel_id)
    except asyncio.TimeoutError:
        await VTNijiConn.release()
        VTNijiConn.raise_error()
        vtlog.error(
            "Failed to fetch live/upcoming data from VTNiji Database, timed out after 15s..."
        )
    niji_data: dict = room_dataset["niji"]
    vtlog.info("Fetching ignored room data from database...")
    is_db_fetched = False
    try:
        db_niji_ignored: dict = await asyncio.wait_for(
            DatabaseConn.fetch_data("nijisanji_ignored"), 15.0)
        is_db_fetched = True
    except asyncio.TimeoutError:
        await DatabaseConn.release()
        DatabaseConn.raise_error()
        vtlog.error(
            "Failed to fetch Nijisanji Ignored database, using blank data.")
        db_niji_ignored = {"data": []}
    niji_ignored: list = db_niji_ignored["data"]

    vtlog.info("Creating tasks to check room status...")
    room_to_fetch = [fetch_room(session, room) for room in niji_data.keys()]
    vtlog.info("Firing API requests!")
    final_results = []
    for froom in asyncio.as_completed(room_to_fetch):
        room_data, room_id = await froom
        vtlog.debug(f"|-- Checking heartbeat for: {room_id}")
        if not room_data:
            vtlog.warn(f"|--! Failed fetching Room ID: {room_id} skipping")
            continue
        if room_data["live_status"] != 1:
            continue
        thumbnail = room_data["user_cover"]
        viewers = room_data["online"]
        # live_time is Beijing time (UTC+8); the aware datetime's .timestamp()
        # already yields the correct UTC epoch, so no extra offset is needed.
        start_time = int(
            round(
                datetime.strptime(room_data["live_time"] + " +0800",
                                  "%Y-%m-%d %H:%M:%S %z").timestamp()))
        gen_id = f"bili{room_id}_{start_time}"
        if gen_id in niji_ignored:
            vtlog.warn(f"Ignoring {room_id} since it's an Ignored restream...")
            continue
        if str(room_id) in niji_data:
            niji_map = niji_data[str(room_id)]
            if "id" in niji_map and niji_map["id"] in collect_live_channels:
                vtlog.warning(
                    f"Ignoring {room_id} since it's a YouTube restream...")
                if gen_id not in niji_ignored:
                    niji_ignored.append(gen_id)
                continue
        dd = {
            "id": gen_id,
            "room_id": int(room_id),
            "title": room_data["title"],
            "startTime": start_time,
            "channel": str(room_data["uid"]),
            "channel_name": niji_data[str(room_id)]["name"],
            "thumbnail": thumbnail,
            "viewers": viewers,
            "platform": "bilibili",
        }
        final_results.append(dd)

    if final_results:
        final_results.sort(key=lambda x: x["startTime"])

    vtlog.info("Updating database...")
    upd_data = {"live": final_results}
    upd_data2 = {"data": niji_ignored}
    try:
        await asyncio.wait_for(
            DatabaseConn.update_data("nijisanji_data", upd_data), 15.0)
    except asyncio.TimeoutError:
        await DatabaseConn.release()
        DatabaseConn.raise_error()
        vtlog.error(
            "Failed to update Nijisanji Heartbeat data, timed out after 15s...")
    if is_db_fetched:
        try:
            await asyncio.wait_for(
                DatabaseConn.update_data("nijisanji_ignored", upd_data2), 15.0)
        except asyncio.TimeoutError:
            await DatabaseConn.release()
            DatabaseConn.raise_error()
            vtlog.error(
                "Failed to update Nijisanji ignored database, timed out after 15s..."
            )
    await session.close()
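Both heartbeat functions convert BiliBili's live_time, reported as Beijing time (UTC+8), into a Unix epoch. A minimal sketch of that conversion, assuming the "%Y-%m-%d %H:%M:%S" format used above:

from datetime import datetime, timedelta, timezone

CST = timezone(timedelta(hours=8))  # BiliBili reports live_time in UTC+8

def live_time_to_epoch(live_time: str) -> int:
    # .timestamp() on an aware datetime already returns a UTC-based epoch,
    # so no manual hour arithmetic is needed afterwards.
    dt = datetime.strptime(live_time, "%Y-%m-%d %H:%M:%S").replace(tzinfo=CST)
    return int(round(dt.timestamp()))

For example, live_time_to_epoch("2020-05-01 20:00:00") is the epoch for 12:00 UTC on that date.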
Example #7
async def youtube_video_feeds(DatabaseConn: VTBiliDatabase, dataset: dict,
                              yt_api_key: RotatingAPIKey):
    sessions = aiohttp.ClientSession(
        headers={"User-Agent": "VTBSchedule/0.9.0"})

    vtlog.info("Fetching saved live data...")
    try:
        youtube_lives_data: dict = await asyncio.wait_for(
            DatabaseConn.fetch_data("yt_other_livedata"), 15.0)
    except asyncio.TimeoutError:
        await DatabaseConn.release()
        DatabaseConn.raise_error()
        vtlog.error("Failed to fetch youtube live database, skipping run.")
        await sessions.close()
        return
    del youtube_lives_data["_id"]

    vtlog.info("Fetching all fetched video IDs...")
    fetched_video_ids: dict = {}
    for channel, channel_data in youtube_lives_data.items():
        if channel not in fetched_video_ids:
            fetched_video_ids[channel] = []
        for video in channel_data:
            if video["id"] not in fetched_video_ids[channel]:
                fetched_video_ids[channel].append(video["id"])

    try:
        ended_video_ids = await asyncio.wait_for(
            DatabaseConn.fetch_data("yt_other_ended_ids"), 15.0)
        del ended_video_ids["_id"]
        for channel, channel_data in ended_video_ids.items():
            if channel not in fetched_video_ids:
                fetched_video_ids[channel] = []
            for video in channel_data:
                if video not in fetched_video_ids[channel]:
                    fetched_video_ids[channel].append(video)
    except asyncio.TimeoutError:
        await DatabaseConn.release()
        DatabaseConn.raise_error()
        vtlog.warning(
            "Failed to fetch the YouTube ended-IDs database, skipping run.")
        await sessions.close()
        return

    vtlog.info("Creating job task for xml files.")
    xmls_to_fetch = [
        fetch_xmls(sessions, chan["id"], chan["affiliates"], nn)
        for nn, chan in enumerate(dataset)
    ]
    collected_videos_ids = {}
    vtlog.info("Firing xml fetching!")
    for xmls in asyncio.as_completed(xmls_to_fetch):
        feed_results, channel, affliate, nn = await xmls

        fetched_videos = []
        if channel in fetched_video_ids:
            fetched_videos = fetched_video_ids[channel]

        vtlog.info(f"|=> Processing XMLs: {dataset[nn]['name']}")
        video_ids = []
        for entry in feed_results.entries:
            ids_ = entry["yt_videoid"]
            if ids_ not in fetched_videos:
                video_ids.append(ids_)

        collected_videos_ids[channel + "//" + affliate] = video_ids

    vtlog.info("Collected!")
    vtlog.info("Now creating tasks for a non-fetched Video IDs to the API.")

    video_to_fetch = []
    for chan_aff, videos in collected_videos_ids.items():
        chan, aff = chan_aff.split("//")
        if not videos:
            vtlog.warn(f"Skipping: {chan} since there's no video to fetch.")
            continue
        param = {
            "part": "snippet,liveStreamingDetails",
            "id": ",".join(videos),
            "key": yt_api_key.get(),
        }
        vtlog.info(f"|-- Processing: {chan}")
        video_to_fetch.append(fetch_apis(sessions, "videos", param, chan, aff))

    if not video_to_fetch:
        vtlog.warning("|== No video to fetch, bailing!")
        await sessions.close()
        return

    vtlog.info("Firing API fetching!")
    # Videos that ended more than six hours ago are pruned below.
    time_past_limit = current_time() - (6 * 60 * 60)
    for task in asyncio.as_completed(video_to_fetch):
        video_results, ch_id, affliate = await task
        if ch_id not in youtube_lives_data:
            youtube_lives_data[ch_id] = []
        if ch_id not in ended_video_ids:
            ended_video_ids[ch_id] = []
        vtlog.info(f"|== Parsing videos data for: {ch_id}")
        youtube_videos_data = youtube_lives_data[ch_id]
        for res_item in video_results["items"]:
            video_id = res_item["id"]
            if "liveStreamingDetails" not in res_item:
                # Assume normal video
                ended_video_ids[ch_id].append(video_id)
                continue
            snippets = res_item["snippet"]
            livedetails = res_item["liveStreamingDetails"]
            if not livedetails:
                # Assume normal video
                ended_video_ids[ch_id].append(video_id)
                continue
            broadcast_cnt = snippets["liveBroadcastContent"]
            if not broadcast_cnt:
                broadcast_cnt = "unknown"
            if broadcast_cnt not in ("live", "upcoming"):
                broadcast_cnt = "unknown"

            title = snippets["title"]
            channel = snippets["channelId"]
            start_time = 0
            if "scheduledStartTime" in livedetails:
                start_time = datetime_yt_parse(
                    livedetails["scheduledStartTime"])
            if "actualStartTime" in livedetails:
                start_time = datetime_yt_parse(livedetails["actualStartTime"])
            thumbs = f"https://i.ytimg.com/vi/{video_id}/maxresdefault.jpg"

            dd_hell = {
                "id": video_id,
                "title": title,
                "status": broadcast_cnt,
                "startTime": start_time,
                "endTime": None,
                "thumbnail": thumbs,
                "group": affliate,
                "platform": "youtube",
            }
            if "actualEndTime" in livedetails:
                dd_hell["endTime"] = datetime_yt_parse(
                    livedetails["actualEndTime"])
                dd_hell["status"] = "past"

            if dd_hell["status"] == "past" and time_past_limit >= dd_hell[
                    "endTime"]:
                vtlog.warning(
                    f"Removing: {video_id} since it's way past the time limit."
                )
                ended_video_ids[ch_id].append(video_id)
                continue

            vtlog.info("Adding: {}".format(video_id))
            youtube_videos_data.append(dd_hell)

        youtube_lives_data[ch_id] = youtube_videos_data
        vtlog.info(f"|== Updating database ({ch_id})...")
        upd_data = {ch_id: youtube_videos_data}
        try:
            await asyncio.wait_for(
                DatabaseConn.update_data("yt_other_livedata", upd_data), 15.0)
        except asyncio.TimeoutError:
            await DatabaseConn.release()
            DatabaseConn.raise_error()
            vtlog.error(
                f"Failed to update live data for {ch_id}, timed out after 15s..."
            )

    try:
        await asyncio.wait_for(
            DatabaseConn.update_data("yt_other_ended_ids", ended_video_ids),
            15.0)
    except asyncio.TimeoutError:
        await DatabaseConn.release()
        DatabaseConn.raise_error()
        vtlog.error("Failed to update ended video ids, timeout by 15s...")

    vtlog.info("Closing sessions...")
    await sessions.close()
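RotatingAPIKey is not defined in these examples; judging from the yt_api_key.get() calls, it hands out one API key per request, presumably cycling through several keys to spread YouTube quota. A hypothetical round-robin sketch consistent with that usage:

import itertools

class RotatingAPIKey:
    # Hypothetical implementation; only the .get() call signature comes
    # from the examples above.
    def __init__(self, api_keys: list):
        self._cycle = itertools.cycle(api_keys)

    def get(self) -> str:
        return next(self._cycle)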
Example #8
async def youtube_channels(DatabaseConn: VTBiliDatabase, dataset: dict,
                           yt_api_key: RotatingAPIKey):
    sessions = aiohttp.ClientSession(
        headers={"User-Agent": "VTBSchedule/0.9.0"})

    vtlog.info("Creating task for channels data.")
    channels_tasks = []
    for channel in dataset:
        vtlog.info(f"|-> Adding {channel['name']}")
        param = {
            "part": "snippet,statistics",
            "id": channel["id"],
            "key": yt_api_key.get(),
        }
        channels_tasks.append(
            fetch_apis(sessions, "channels", param, channel["name"],
                       channel["affiliates"]))

    vtlog.info("Running all tasks...")
    for chan_task in asyncio.as_completed(channels_tasks):
        chan_data, chan_name, chan_aff = await chan_task
        vtlog.info(f"|--> Processing: {chan_name}")

        if "items" not in chan_data:
            vtlog.warn(f"|--! Failed to fetch: {chan_name}")
            continue
        chan_data = chan_data["items"]
        if not chan_data:
            vtlog.warn(f"|--! Empty data on {chan_name}")
            continue
        chan_data = chan_data[0]
        if not chan_data:
            vtlog.warn(f"|--! Empty data on {chan_name}")
            continue

        chan_snip = chan_data["snippet"]
        chan_stats = chan_data["statistics"]

        ch_id = chan_data["id"]
        title = chan_snip["title"]
        desc = chan_snip["description"]
        pubat = chan_snip["publishedAt"]

        thumbs_data = chan_snip["thumbnails"]
        if "high" in thumbs_data:
            thumbs = thumbs_data["high"]["url"]
        elif "medium" in thumbs_data:
            thumbs = thumbs_data["medium"]["url"]
        else:
            thumbs = thumbs_data["default"]["url"]

        subscount = chan_stats["subscriberCount"]
        viewcount = chan_stats["viewCount"]
        vidcount = chan_stats["videoCount"]

        try:
            subscount = int(subscount)
            viewcount = int(viewcount)
            vidcount = int(vidcount)
        except ValueError:
            pass

        data = {
            "id": ch_id,
            "name": title,
            "description": desc,
            "publishedAt": pubat,
            "thumbnail": thumbs,
            "group": chan_aff,
            "subscriberCount": subscount,
            "viewCount": viewcount,
            "videoCount": vidcount,
            "platform": "youtube",
        }

        vtlog.info(f"Updating channels database for {ch_id}...")
        try:
            await asyncio.wait_for(
                DatabaseConn.update_data("yt_other_channels", {ch_id: data}),
                15.0)
        except asyncio.TimeoutError:
            await DatabaseConn.release()
            DatabaseConn.raise_error()
            vtlog.error("Failed to update channels data, timeout by 15s...")

    await sessions.close()
Example #9
async def youtube_live_heartbeat(DatabaseConn: VTBiliDatabase,
                                 affliates_dataset: dict,
                                 yt_api_key: RotatingAPIKey):
    session = aiohttp.ClientSession(
        headers={"User-Agent": "VTBSchedule/0.9.0"})

    vtlog.info("Fetching live data...")

    try:
        youtube_lives_data = await asyncio.wait_for(
            DatabaseConn.fetch_data("yt_other_livedata"), 15.0)
        ended_video_ids = await asyncio.wait_for(
            DatabaseConn.fetch_data("yt_other_ended_ids"), 15.0)
    except asyncio.TimeoutError:
        await DatabaseConn.release()
        DatabaseConn.raise_error()
        vtlog.error("Failed to fetch youtube live database, skipping run.")
        await session.close()
        return
    del youtube_lives_data["_id"]
    # Also drop Mongo's "_id" from the ended-IDs document, as Example #7
    # does, so it is not written back verbatim at the end.
    ended_video_ids.pop("_id", None)

    videos_list = []
    videos_set = {}
    for cid, data in youtube_lives_data.items():
        for vd in data:
            if vd["status"] in ("unknown"):
                continue
            videos_list.append(vd["id"])
            videos_set[vd["id"]] = cid

    if not videos_list:
        vtlog.warning("No live/upcoming videos, bailing!")
        await session.close()
        return

    # The YouTube videos.list endpoint accepts at most 50 IDs per request,
    # so query in chunks of 40.
    chunked_videos_list = [
        videos_list[i:i + 40] for i in range(0, len(videos_list), 40)
    ]
    items_data_data = []
    for chunk_n, chunk_list in enumerate(chunked_videos_list, 1):
        vtlog.info(
            f"Checking heartbeat for chunk {chunk_n} out of {len(chunked_videos_list)} chunks"
        )
        param = {
            "part": "snippet,liveStreamingDetails",
            "id": ",".join(chunk_list),
            "key": yt_api_key.get(),
        }
        items_data, _, _ = await fetch_apis(session, "videos", param,
                                            "nullify", "nullify")
        items_data_data.extend(items_data["items"])
    await session.close()

    parsed_ids = {}
    vtlog.info("Parsing results...")
    # Streams that ended more than six hours ago are pruned below.
    time_past_limit = current_time() - (6 * 60 * 60)
    for res_item in items_data_data:
        video_id = res_item["id"]
        vtlog.info(f"|-- Checking {video_id} heartbeat...")
        snippets = res_item["snippet"]
        channel_id = snippets["channelId"]
        if channel_id not in ended_video_ids:
            ended_video_ids[channel_id] = []
        if "liveStreamingDetails" not in res_item:
            continue
        livedetails = res_item["liveStreamingDetails"]
        status_live = "upcoming"
        start_time = 0
        end_time = 0
        if "scheduledStartTime" in livedetails:
            start_time = datetime_yt_parse(livedetails["scheduledStartTime"])
        if "actualStartTime" in livedetails:
            status_live = "live"
            start_time = datetime_yt_parse(livedetails["actualStartTime"])
        if "actualEndTime" in livedetails:
            status_live = "past"
            end_time = datetime_yt_parse(livedetails["actualEndTime"])
        view_count = None
        if "concurrentViewers" in livedetails:
            view_count = livedetails["concurrentViewers"]
            try:
                view_count = int(view_count)
            except ValueError:
                pass
        thumbs = f"https://i.ytimg.com/vi/{video_id}/maxresdefault.jpg"
        vtlog.info(f"|--> Update status for {video_id}: {status_live}")
        new_streams_data = []
        for data_streams in youtube_lives_data[channel_id]:
            if "group" not in data_streams:
                data_streams["group"] = affliates_dataset[channel_id]
            if data_streams["id"] == video_id:
                append_data = {
                    "id": data_streams["id"],
                    "title": snippets["title"],
                    "status": status_live,
                    "startTime": start_time,
                    "endTime": None,
                    "group": data_streams["group"],
                    "thumbnail": thumbs,
                    "platform": "youtube",
                }
                if view_count is not None:
                    append_data["viewers"] = view_count
                if status_live == "past":
                    append_data["endTime"] = end_time
                if status_live == "past" and time_past_limit >= end_time:
                    vtlog.warning(
                        f"Removing: {video_id} since it's way past the time limit."
                    )
                    ended_video_ids[channel_id].append(video_id)
                    continue
                new_streams_data.append(append_data)
            else:
                if data_streams["status"] == "past":
                    if time_past_limit >= data_streams["endTime"]:
                        vtlog.warning(
                            f"Removing: {data_streams['id']} since it's way "
                            "past the time limit."
                        )
                        ended_video_ids[channel_id].append(data_streams["id"])
                        continue
                new_streams_data.append(data_streams)
        new_streams_data = await check_for_doubles(new_streams_data)
        youtube_lives_data[channel_id] = new_streams_data
        vtlog.info(f"|-- Updating heartbeat for channel {channel_id}...")
        try:
            await asyncio.wait_for(
                DatabaseConn.update_data("yt_other_livedata",
                                         {channel_id: new_streams_data}), 15.0)
        except asyncio.TimeoutError:
            await DatabaseConn.release()
            DatabaseConn.raise_error()
            vtlog.error(
                f"|--! Failed to update heartbeat for channel {channel_id}, timed out after 15s..."
            )
        parsed_ids[video_id] = channel_id

    # Filter out videos that were privated or deleted upstream; the API
    # returns no item for them, so they never appear in parsed_ids.
    parsed_ids_keys = list(parsed_ids.keys())
    for video in videos_list:
        if video not in parsed_ids_keys:
            chan_id = videos_set[video]
            channel_data = youtube_lives_data[chan_id]
            new_channel_data = []
            for ch_vid in channel_data:
                if ch_vid["id"] != video:
                    new_channel_data.append(ch_vid)
                else:
                    if chan_id not in ended_video_ids:
                        ended_video_ids[chan_id] = []
                    ended_video_ids[chan_id].append(ch_vid["id"])
            vtlog.info(
                f"|-- Updating heartbeat filter for channel {chan_id}...")
            try:
                await asyncio.wait_for(
                    DatabaseConn.update_data("yt_other_livedata",
                                             {chan_id: new_channel_data}),
                    15.0)
            except asyncio.TimeoutError:
                await DatabaseConn.release()
                DatabaseConn.raise_error()
                vtlog.error(
                    f"|--! Failed to update heartbeat for channel {chan_id}, timed out after 15s..."
                )

    try:
        await asyncio.wait_for(
            DatabaseConn.update_data("yt_other_ended_ids", ended_video_ids),
            15.0)
    except asyncio.TimeoutError:
        await DatabaseConn.release()
        DatabaseConn.raise_error()
        vtlog.error("Failed to update ended video ids, timeout by 15s...")