class Twitch(object):
    __slots__ = ["cookies_file", "user_agent", "twitch_login", "running"]

    def __init__(self, username, user_agent, password=None):
        cookies_path = os.path.join(Path().absolute(), "cookies")
        Path(cookies_path).mkdir(parents=True, exist_ok=True)
        self.cookies_file = os.path.join(cookies_path, f"{username}.pkl")
        self.user_agent = user_agent
        self.twitch_login = TwitchLogin(
            CLIENT_ID, username, self.user_agent, password=password
        )
        self.running = True

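    # Log in by reusing the saved cookie jar (cookies/<username>.pkl) when it exists;
    # otherwise run the full login flow and persist the resulting cookies for next time.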
    def login(self):
        if os.path.isfile(self.cookies_file) is False:
            if self.twitch_login.login_flow():
                self.twitch_login.save_cookies(self.cookies_file)
        else:
            self.twitch_login.load_cookies(self.cookies_file)
            self.twitch_login.set_token(self.twitch_login.get_auth_token())

    # === STREAMER / STREAM / INFO === #
    def update_stream(self, streamer):
        if streamer.stream.update_required() is True:
            stream_info = self.get_stream_info(streamer)
            if stream_info is not None:
                streamer.stream.update(
                    broadcast_id=stream_info["stream"]["id"],
                    title=stream_info["broadcastSettings"]["title"],
                    game=stream_info["broadcastSettings"]["game"],
                    tags=stream_info["stream"]["tags"],
                    viewers_count=stream_info["stream"]["viewersCount"],
                )

                event_properties = {
                    "channel_id": streamer.channel_id,
                    "broadcast_id": streamer.stream.broadcast_id,
                    "player": "site",
                    "user_id": self.twitch_login.get_user_id(),
                }

                if (
                    streamer.stream.game_name() is not None
                    and streamer.settings.claim_drops is True
                ):
                    event_properties["game"] = streamer.stream.game_name()
                    # Also update campaigns_ids so we are sure to track the correct campaign
                    streamer.stream.campaigns_ids = (
                        self.__get_campaign_ids_from_streamer(streamer)
                    )

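                # Payload sent to the spade endpoint: a single minute-watched event whose
                # properties identify the channel, broadcast, player type and our user id
                # (plus the game name when drops should be claimed, set above).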
                streamer.stream.payload = [
                    {"event": "minute-watched", "properties": event_properties}
                ]

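    # The spade (analytics) endpoint is not part of the public API: scrape the streamer's
    # page for the settings.*.js URL, then pull the "spade_url" value out of that file.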
    def get_spade_url(self, streamer):
        try:
            headers = {"User-Agent": self.user_agent}
            main_page_request = requests.get(streamer.streamer_url, headers=headers)
            response = main_page_request.text
            regex_settings = "(https://static.twitchcdn.net/config/settings.*?js)"
            settings_url = re.search(regex_settings, response).group(1)

            settings_request = requests.get(settings_url, headers=headers)
            response = settings_request.text
            regex_spade = '"spade_url":"(.*?)"'
            streamer.stream.spade_url = re.search(regex_spade, response).group(1)
        except requests.exceptions.RequestException as e:
            logger.error(f"Something went wrong during extraction of 'spade_url': {e}")

    def get_broadcast_id(self, streamer):
        json_data = copy.deepcopy(GQLOperations.WithIsStreamLiveQuery)
        json_data["variables"] = {"id": streamer.channel_id}
        response = self.post_gql_request(json_data)
        if response != {}:
            stream = response["data"]["user"]["stream"]
            if stream is not None:
                return stream["id"]
            else:
                raise StreamerIsOfflineException

    def get_stream_info(self, streamer):
        json_data = copy.deepcopy(GQLOperations.VideoPlayerStreamInfoOverlayChannel)
        json_data["variables"] = {"channel": streamer.username}
        response = self.post_gql_request(json_data)
        if response != {}:
            if response["data"]["user"]["stream"] is None:
                raise StreamerIsOfflineException
            else:
                return response["data"]["user"]

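    # Re-check a streamer's status; calls made less than 60 seconds after the last
    # offline timestamp are skipped, and a failed update marks the streamer offline.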
    def check_streamer_online(self, streamer):
        if time.time() < streamer.offline_at + 60:
            return

        if streamer.is_online is False:
            try:
                self.get_spade_url(streamer)
                self.update_stream(streamer)
            except StreamerIsOfflineException:
                streamer.set_offline()
            else:
                streamer.set_online()
        else:
            try:
                self.update_stream(streamer)
            except StreamerIsOfflineException:
                streamer.set_offline()

    def get_channel_id(self, streamer_username):
        json_response = self.__do_helix_request(f"/users?login={streamer_username}")
        if "data" not in json_response:
            raise StreamerDoesNotExistException
        else:
            data = json_response["data"]
            if len(data) >= 1:
                return data[0]["id"]
            else:
                raise StreamerDoesNotExistException

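    # Collect the logins of every channel we follow via the Helix /users/follows
    # endpoint, walking the cursor-based pagination until the "pagination" object
    # comes back empty (response shape inferred from the fields read below).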
    def get_followers(self, first=100):
        followers = []
        pagination = {}
        while True:
            query = f"/users/follows?from_id={self.twitch_login.get_user_id()}&first={first}"
            if pagination != {}:
                query += f"&after={pagination['cursor']}"

            json_response = self.__do_helix_request(query)
            pagination = json_response["pagination"]
            followers += [fw["to_login"].lower() for fw in json_response["data"]]
            time.sleep(random.uniform(0.3, 0.7))

            if pagination == {}:
                break

        return followers

    def update_raid(self, streamer, raid):
        if streamer.raid != raid:
            streamer.raid = raid
            json_data = copy.deepcopy(GQLOperations.JoinRaid)
            json_data["variables"] = {"input": {"raidID": raid.raid_id}}
            self.post_gql_request(json_data)

            logger.info(
                f"Joining raid from {streamer} to {raid.target_login}!",
                extra={"emoji": ":performing_arts:"},
            )

    def viewer_is_mod(self, streamer):
        json_data = copy.deepcopy(GQLOperations.ModViewChannelQuery)
        json_data["variables"] = {"channelLogin": streamer.username}
        response = self.post_gql_request(json_data)
        try:
            streamer.viewer_is_mod = response["data"]["user"]["self"]["isModerator"]
        except (ValueError, KeyError):
            streamer.viewer_is_mod = False

    # === 'GLOBALS' METHODS === #
    # Sleep in chunks so the loop can break quickly after CTRL+C
    def __chuncked_sleep(self, seconds, chunk_size=3):
        sleep_time = max(seconds, 0) / chunk_size
        for i in range(0, chunk_size):
            time.sleep(sleep_time)
            if self.running is False:
                break

    def __check_connection_handler(self, chunk_size):
        # The success rate is usually very high, so a failure most likely means the
        # internet connection dropped: wait until it is available again.
        while internet_connection_available() is False:
            random_sleep = random.randint(1, 3)
            logger.warning(
                f"No internet connection available! Retry after {random_sleep}m"
            )
            self.__chuncked_sleep(random_sleep * 60, chunk_size=chunk_size)

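    # Thin wrapper around the Helix REST API using the logged-in session, e.g.
    # __do_helix_request("/users?login=foo") performs a GET on {API}/helix/users?login=foo.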
    def __do_helix_request(self, query, response_as_json=True):
        url = f"{API}/helix/{query.strip('/')}"
        response = self.twitch_login.session.get(url)
        logger.debug(
            f"Query: {query}, Status code: {response.status_code}, Content: {response.json()}"
        )
        return response.json() if response_as_json is True else response

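    # Send a (possibly batched) GQL operation with our OAuth token, Client-Id and
    # User-Agent; returns the parsed JSON response, or {} on any request error.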
    def post_gql_request(self, json_data):
        try:
            response = requests.post(
                GQLOperations.url,
                json=json_data,
                headers={
                    "Authorization": f"OAuth {self.twitch_login.get_auth_token()}",
                    "Client-Id": CLIENT_ID,
                    "User-Agent": self.user_agent,
                },
            )
            logger.debug(
                f"Data: {json_data}, Status code: {response.status_code}, Content: {response.text}"
            )
            return response.json()
        except requests.exceptions.RequestException as e:
            logger.error(
                f"Error with GQLOperations ({json_data['operationName']}): {e}"
            )
            return {}

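    # Main watch loop: each cycle picks the currently online streamers, orders them by
    # the configured priority (ORDER, POINTS_*, STREAK, DROPS), keeps at most max_streams
    # of them and POSTs a minute-watched event to each spade endpoint, spreading the
    # requests over roughly one minute.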
    def send_minute_watched_events(self, streamers, priority, chunk_size=3):
        while self.running:
            try:
                streamers_index = [
                    i
                    for i in range(0, len(streamers))
                    if streamers[i].is_online is True
                    and (
                        streamers[i].online_at == 0
                        or (time.time() - streamers[i].online_at) > 30
                    )
                ]

                for index in streamers_index:
                    if (streamers[index].stream.update_elapsed() / 60) > 10:
                        # This user is flagged online but hasn't been updated for more than
                        # 10 minutes: perform a manual update and check if they are still online.
                        self.check_streamer_online(streamers[index])

                streamers_watching = []
                for prior in priority:
                    if prior == Priority.ORDER and len(streamers_watching) < max_streams:
                        # Take the first max_streams items, they are already in order
                        streamers_watching += streamers_index[:max_streams]

                    elif (
                        prior in [Priority.POINTS_ASCENDING, Priority.POINTS_DESCEDING]
                        and len(streamers_watching) < max_streams
                    ):
                        items = [
                            {"points": streamers[index].channel_points, "index": index}
                            for index in streamers_index
                        ]
                        items = sorted(
                            items,
                            key=lambda x: x["points"],
                            reverse=(prior == Priority.POINTS_DESCEDING),
                        )
                        streamers_watching += [item["index"] for item in items][:max_streams]

                    elif prior == Priority.STREAK and len(streamers_watching) < max_streams:
                        """
                        Check if we need need to change priority based on watch streak
                        Viewers receive points for returning for x consecutive streams.
                        Each stream must be at least 10 minutes long and it must have been at least 30 minutes since the last stream ended.
                        Watch at least 6m for get the +10
                        """
                        for index in streamers_index:
                            if (
                                streamers[index].settings.watch_streak is True
                                and streamers[index].stream.watch_streak_missing is True
                                and (
                                    streamers[index].offline_at == 0
                                    or (
                                        (time.time() - streamers[index].offline_at)
                                        // 60
                                    )
                                    > 30
                                )
                                and streamers[index].stream.minute_watched < 7
                            ):
                                streamers_watching.append(index)
                                if len(streamers_watching) == max_streams:
                                    break

                    elif prior == Priority.DROPS and len(streamers_watching) < max_streams:
                        for index in streamers_index:
                            if streamers[index].drops_condition() is True:
                                streamers_watching.append(index)
                                if len(streamers_watching) == max_streams:
                                    break

                """
                Twitch has a limit - you can't watch more than 2 channels at one time.
                We take the first two streamers from the list as they have the highest priority (based on order or WatchStreak).
                """
                streamers_watching = streamers_watching[:max_streams]

                for index in streamers_watching:
                    next_iteration = time.time() + 60 / len(streamers_watching)

                    try:
                        response = requests.post(
                            streamers[index].stream.spade_url,
                            data=streamers[index].stream.encode_payload(),
                            headers={"User-Agent": self.user_agent},
                            timeout=60,
                        )
                        logger.debug(
                            f"Send minute watched request for {streamers[index]} - Status code: {response.status_code}"
                        )
                        if response.status_code == 204:
                            streamers[index].stream.update_minute_watched()

                            """
                            Remember, you can only earn progress towards a time-based Drop on one participating channel at a time.  [ ! ! ! ]
                            You can also check your progress towards Drops within a campaign anytime by viewing the Drops Inventory.
                            For time-based Drops, if you are unable to claim the Drop in time, you will be able to claim it from the inventory page until the Drops campaign ends.
                            """

                            for campaign in streamers[index].stream.campaigns:
                                for drop in campaign.drops:
                                    # We could add .has_preconditions_met condition inside is_printable
                                    if (
                                        drop.has_preconditions_met is not False
                                        and drop.is_printable is True
                                    ):
                                        # print("=" * 125)
                                        logger.info(
                                            f"{streamers[index]} is streaming {streamers[index].stream}"
                                        )
                                        logger.info(f"Campaign: {campaign}")
                                        logger.info(f"Drop: {drop}")
                                        logger.info(f"{drop.progress_bar()}")
                                        # print("=" * 125)

                    except requests.exceptions.ConnectionError as e:
                        logger.error(f"Error while trying to send minute watched: {e}")
                        self.__check_connection_handler(chunk_size)
                    except requests.exceptions.Timeout as e:
                        logger.error(f"Error while trying to send minute watched: {e}")

                    self.__chuncked_sleep(
                        next_iteration - time.time(), chunk_size=chunk_size
                    )

                if streamers_watching == []:
                    self.__chuncked_sleep(60, chunk_size=chunk_size)
            except Exception:
                logger.error("Exception raised in send minute watched", exc_info=True)

    # === CHANNEL POINTS / PREDICTION === #
    # Load the amount of current points for a channel, check if a bonus is available
    def load_channel_points_context(self, streamer):
        json_data = copy.deepcopy(GQLOperations.ChannelPointsContext)
        json_data["variables"] = {"channelLogin": streamer.username}

        response = self.post_gql_request(json_data)
        if response != {}:
            if response["data"]["community"] is None:
                raise StreamerDoesNotExistException
            channel = response["data"]["community"]["channel"]
            community_points = channel["self"]["communityPoints"]
            streamer.channel_points = community_points["balance"]

            if community_points["availableClaim"] is not None:
                self.claim_bonus(streamer, community_points["availableClaim"]["id"])

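    # Place a bet on a prediction event: compute the amount and outcome from the current
    # channel points, honour the configured skip filters, and submit a MakePrediction
    # request with a random transaction id.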
    def make_predictions(self, event):
        decision = event.bet.calculate(event.streamer.channel_points)
        selector_index = 0 if decision["choice"] == "A" else 1

        logger.info(
            f"Going to complete bet for {event}",
            extra={
                "emoji": ":four_leaf_clover:",
                "color": Settings.logger.color_palette.BET_GENERAL,
            },
        )
        if event.status == "ACTIVE":
            skip, compared_value = event.bet.skip()
            if skip is True:
                logger.info(
                    f"Skip betting for the event {event}",
                    extra={
                        "emoji": ":pushpin:",
                        "color": Settings.logger.color_palette.BET_FILTERS,
                    },
                )
                logger.info(
                    f"Skip settings {event.bet.settings.filter_condition}, current value is: {compared_value}",
                    extra={
                        "emoji": ":pushpin:",
                        "color": Settings.logger.color_palette.BET_FILTERS,
                    },
                )
            else:
                if decision["amount"] >= 10:
                    logger.info(
                        f"Place {_millify(decision['amount'])} channel points on: {event.bet.get_outcome(selector_index)}",
                        extra={
                            "emoji": ":four_leaf_clover:",
                            "color": Settings.logger.color_palette.BET_GENERAL,
                        },
                    )

                    json_data = copy.deepcopy(GQLOperations.MakePrediction)
                    json_data["variables"] = {
                        "input": {
                            "eventID": event.event_id,
                            "outcomeID": decision["id"],
                            "points": decision["amount"],
                            "transactionID": token_hex(16),
                        }
                    }
                    return self.post_gql_request(json_data)
        else:
            logger.info(
                f"Oh no! The event is not active anymore! Current status: {event.status}",
                extra={
                    "emoji": ":disappointed_relieved:",
                    "color": Settings.logger.color_palette.BET_FAILED,
                },
            )

    def claim_bonus(self, streamer, claim_id):
        if Settings.logger.less is False:
            logger.info(
                f"Claiming the bonus for {streamer}!", extra={"emoji": ":gift:"}
            )

        json_data = copy.deepcopy(GQLOperations.ClaimCommunityPoints)
        json_data["variables"] = {
            "input": {"channelID": streamer.channel_id, "claimID": claim_id}
        }
        self.post_gql_request(json_data)

    # === CAMPAIGNS / DROPS / INVENTORY === #
    def __get_campaign_ids_from_streamer(self, streamer):
        json_data = copy.deepcopy(GQLOperations.DropsHighlightService_AvailableDrops)
        json_data["variables"] = {"channelID": streamer.channel_id}
        response = self.post_gql_request(json_data)
        try:
            return (
                []
                if response["data"]["channel"]["viewerDropCampaigns"] is None
                else [
                    item["id"]
                    for item in response["data"]["channel"]["viewerDropCampaigns"]
                ]
            )
        except (ValueError, KeyError):
            return []

    def __get_inventory(self):
        response = self.post_gql_request(GQLOperations.Inventory)
        try:
            return (
                response["data"]["currentUser"]["inventory"] if response != {} else {}
            )
        except (ValueError, KeyError, TypeError):
            return {}

    def __get_drops_dashboard(self, status=None):
        response = self.post_gql_request(GQLOperations.ViewerDropsDashboard)
        campaigns = response["data"]["currentUser"]["dropCampaigns"]
        if status is not None:
            campaigns = list(filter(lambda x: x["status"] == status.upper(), campaigns))
        return campaigns

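    # Fetch full campaign details in batches: the campaign list is split into chunks of
    # 20 and each chunk is sent as a single batched GQL request.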
    def __get_campaigns_details(self, campaigns):
        result = []
        chunks = create_chunks(campaigns, 20)
        for chunk in chunks:
            json_data = []
            for campaign in chunk:
                json_data.append(copy.deepcopy(GQLOperations.DropCampaignDetails))
                json_data[-1]["variables"] = {
                    "dropID": campaign["id"],
                    "channelLogin": f"{self.twitch_login.get_user_id()}",
                }

            response = self.post_gql_request(json_data)
            result += list(map(lambda x: x["data"]["user"]["dropCampaign"], response))
        return result

    def __sync_campaigns(self, campaigns):
        # We need the inventory only to get the real updated values/progress.
        # Get data from the inventory and sync the current status of the campaigns.
        inventory = self.__get_inventory()
        if inventory not in [None, {}] and inventory["dropCampaignsInProgress"] not in [
            None,
            {},
        ]:
            # Iterate over all campaigns from the dashboard (only active ones, with working drops).
            # This array also contains campaigns we have never started (not in the inventory).
            for i in range(len(campaigns)):
                campaigns[i].clear_drops()  # Remove all the claimed drops
                # Iterate over all campaigns currently in progress from our inventory
                for progress in inventory["dropCampaignsInProgress"]:
                    if progress["id"] == campaigns[i].id:
                        campaigns[i].in_inventory = True
                        campaigns[i].sync_drops(
                            progress["timeBasedDrops"], self.claim_drop
                        )
                        campaigns[i].clear_drops()  # Remove all the claimed drops
                        break
        return campaigns

    def claim_drop(self, drop):
        logger.info(f"Claim {drop}", extra={"emoji": ":package:"})

        json_data = copy.deepcopy(GQLOperations.DropsPage_ClaimDropRewards)
        json_data["variables"] = {"input": {"dropInstanceID": drop.drop_instance_id}}
        response = self.post_gql_request(json_data)
        try:
            # response["data"]["claimDropRewards"] can be null and respose["data"]["errors"] != []
            # or response["data"]["claimDropRewards"]["status"] === DROP_INSTANCE_ALREADY_CLAIMED
            if ("claimDropRewards" in response["data"]) and (
                response["data"]["claimDropRewards"] is None
            ):
                return False
            elif ("errors" in response["data"]) and (response["data"]["errors"] != []):
                return False
            elif ("claimDropRewards" in response["data"]) and (
                response["data"]["claimDropRewards"]["status"]
                in ["ELIGIBLE_FOR_ALL", "DROP_INSTANCE_ALREADY_CLAIMED"]
            ):
                return True
            else:
                return False
        except (ValueError, KeyError):
            return False

    def claim_all_drops_from_inventory(self):
        inventory = self.__get_inventory()
        if inventory not in [None, {}]:
            if inventory["dropCampaignsInProgress"] not in [None, {}]:
                for campaign in inventory["dropCampaignsInProgress"]:
                    for drop_dict in campaign["timeBasedDrops"]:
                        drop = Drop(drop_dict)
                        drop.update(drop_dict["self"])
                        if drop.is_claimable is True:
                            drop.is_claimed = self.claim_drop(drop)
                            time.sleep(random.uniform(5, 10))

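    # Background loop: refresh the ACTIVE campaigns from the drops dashboard every 60
    # minutes, sync drop progress from the inventory on every cycle, and attach the
    # matching campaigns to each streamer that currently satisfies the drops condition.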
    def sync_campaigns(self, streamers, chunk_size=3):
        campaigns_update = 0
        while self.running:
            try:
                # Get an update from the dashboard every 60 minutes
                if (
                    campaigns_update == 0
                    or ((time.time() - campaigns_update) / 60) > 60
                ):
                    campaigns_update = time.time()
                    # Get full details of the currently ACTIVE campaigns.
                    # Use the dashboard so we can discover new drops not yet present in our inventory.
                    campaigns_details = self.__get_campaigns_details(
                        self.__get_drops_dashboard(status="ACTIVE")
                    )
                    campaigns = []

                    # Clear the array and structure: remove all timeBasedDrops that have expired or not started yet
                    for index in range(0, len(campaigns_details)):
                        campaign = Campaign(campaigns_details[index])
                        if campaign.dt_match is True:
                            # Remove all the drops already claimed or with dt not matching
                            campaign.clear_drops()
                            if campaign.drops != []:
                                campaigns.append(campaign)

                # Divide et impera :)
                campaigns = self.__sync_campaigns(campaigns)

                # Check if the user is currently streaming the same game present in campaigns_details
                for i in range(0, len(streamers)):
                    if streamers[i].drops_condition() is True:
                        # Yes! streamers[i] has drops_tags enabled and is currently streaming a game with an active campaign!
                        # With 'campaigns_ids' we are also sure that this streamer has the campaign active.
                        streamers[i].stream.campaigns = list(
                            filter(
                                lambda x: x.drops != []
                                and x.game == streamers[i].stream.game
                                and x.id in streamers[i].stream.campaigns_ids,
                                campaigns,
                            )
                        )

            except (ValueError, KeyError, requests.exceptions.ConnectionError) as e:
                logger.error(f"Error while syncing inventory: {e}")
                self.__check_connection_handler(chunk_size)

            self.__chuncked_sleep(60, chunk_size=chunk_size)
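

# Illustrative usage sketch (assumption: Streamer objects, Priority values and the
# surrounding thread handling come from the rest of the package; this is not the
# project's actual entry point):
#
#   twitch = Twitch("my_username", "Mozilla/5.0 ...")
#   twitch.login()
#   twitch.get_channel_id("some_streamer")  # raises StreamerDoesNotExistException if unknown
#   threading.Thread(
#       target=twitch.send_minute_watched_events, args=(streamers, priority)
#   ).start()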