Example 1
    def update_emotes(self):
        log.debug('Updating BTTV Emotes...')
        global_emotes = self.api.get_global_emotes()
        channel_emotes = self.api.get_channel_emotes(
            StreamHelper.get_streamer())

        self.global_emotes = [emote['code'] for emote in global_emotes]
        self.channel_emotes = [emote['code'] for emote in channel_emotes]

        # Store channel emotes in redis
        streamer = StreamHelper.get_streamer()
        key = '{streamer}:emotes:bttv_channel_emotes'.format(streamer=streamer)
        with RedisManager.pipeline_context() as pipeline:
            pipeline.delete(key)
            for emote in channel_emotes:
                pipeline.hset(key, emote['code'], emote['emote_hash'])

        self.all_emotes = []
        with RedisManager.pipeline_context() as pipeline:
            for emote in global_emotes + channel_emotes:
                # Store all possible emotes, with their regex in an easily
                # accessible list.
                self.all_emotes.append(
                    self.build_emote(emote['code'], emote['emote_hash']))

                # Make sure all available emotes are available in redis
                pipeline.hset('global:emotes:bttv', emote['code'],
                              emote['emote_hash'])
Example 2
    def update_emotes(self):
        log.debug('Updating BTTV Emotes...')
        global_emotes = self.bttv_api.get_global_emotes()
        channel_emotes = self.bttv_api.get_channel_emotes(StreamHelper.get_streamer())

        self.global_emotes = [emote['code'] for emote in global_emotes]
        self.channel_emotes = [emote['code'] for emote in channel_emotes]

        # Store channel emotes in redis
        streamer = StreamHelper.get_streamer()
        key = '{streamer}:emotes:bttv_channel_emotes'.format(streamer=streamer)
        with RedisManager.pipeline_context() as pipeline:
            pipeline.delete(key)
            for emote in channel_emotes:
                pipeline.hset(key, emote['code'], emote['emote_hash'])

        self.all_emotes = []
        with RedisManager.pipeline_context() as pipeline:
            for emote in global_emotes + channel_emotes:
                # Store all possible emotes, with their regex in an easily
                # accessible list.
                self.all_emotes.append(self.build_emote(emote['code'], emote['emote_hash']))

                # Make sure all available emotes are available in redis
                pipeline.hset('global:emotes:bttv', emote['code'], emote['emote_hash'])
Example 3
    def handle_emotes(emote_counts):
        # the passed dict maps emote code (e.g. "Kappa") to an EmoteInstanceCount instance
        streamer = StreamHelper.get_streamer()
        redis_key = "{streamer}:emotes:count".format(streamer=streamer)
        with RedisManager.pipeline_context() as redis:
            for emote_code, instance_counts in emote_counts.items():
                redis.zincrby(redis_key, instance_counts.count, emote_code)
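One detail worth flagging across these snippets is the zincrby argument order, which flipped in redis-py 3.0: the increment now comes before the member, as in the example above, while some of the older migration examples further down still use the 2.x (name, value, amount) order. A quick illustration against a plain client (key and member names are made up):

import redis

r = redis.Redis(decode_responses=True)

# redis-py >= 3.0: zincrby(name, amount, value)
r.zincrby('somestreamer:emotes:count', 2, 'Kappa')  # Kappa's score += 2

# read the counters back, highest score first
print(r.zrevrange('somestreamer:emotes:count', 0, 4, withscores=True))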
Example 4
    def add_playsound(bot, source, message, event, args):
        if message is not None and len(message.split(' ')) == 3:
            playsoundCategory, playsoundName, playsoundURL = message.split(' ')
            _savedplaysounds = RedisManager.get().hgetall(
                'playsounds:{}'.format(playsoundCategory))

            if playsoundName in _savedplaysounds:
                bot.say(
                    'Name of playsound already exists. Please choose a different name'
                )
                return False

            # hgetall returns a name -> URL dict, so the URL has to be checked
            # against the values, not the keys
            elif playsoundURL in _savedplaysounds.values():
                bot.say(
                    'URL of playsound already exists. Please choose a different link'
                )
                return False

            with RedisManager.pipeline_context() as pipeline:
                pipeline.hset('playsounds:{}'.format(playsoundCategory),
                              playsoundName, playsoundURL)

            bot.say(
                'Successfully added {} with the name {} to the playsound list'.
                format(playsoundURL, playsoundName))

        else:
            bot.say('Invalid syntax. Usage: !add playsound category name url')
            return False
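For completeness, reading a sound back out of the hash that add_playsound writes to would be a single hget; a hypothetical lookup helper under the same key layout:

def get_playsound_url(category, name):
    # returns the stored URL, or None if no playsound by that name exists
    return RedisManager.get().hget('playsounds:{}'.format(category), name)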
Example 5
def downgrade():
    with RedisManager.pipeline_context() as redis:
        streamer = pb_config['main']['streamer']
        redis.delete(
            "{streamer}:current_quest_emote".format(streamer=streamer))
        redis.delete("{streamer}:current_quest".format(streamer=streamer))
        redis.delete(
            "{streamer}:current_quest_progress".format(streamer=streamer))
        redis.delete("{streamer}:quests:finished".format(streamer=streamer))
Example 6
def upgrade():
    with RedisManager.pipeline_context() as redis:
        streamer = pb_config['main']['streamer']
        redis.delete(
            '{streamer}:emotes:ffz_channel_emotes'.format(streamer=streamer))
        redis.delete(
            '{streamer}:emotes:bttv_channel_emotes'.format(streamer=streamer))
        redis.delete('global:emotes:ffz_global')
        redis.delete('global:emotes:bttv_global')
Example 7
    def redis_load(self):
        """ Load data from redis using a newly created pipeline """
        if self.redis_loaded:
            return

        with RedisManager.pipeline_context() as pipeline:
            self.queue_up_redis_calls(pipeline)
            data = pipeline.execute()
            self.load_redis_data(data)
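queue_up_redis_calls and load_redis_data are hooks that this method expects to exist on the same object: the first queues reads onto the pipeline, the second unpacks the results, which execute() returns in the order the calls were queued. A hypothetical pair of implementations, borrowing key names from the migration examples further down:

class RedisLoadable:
    # sketch of the two hooks redis_load() relies on; not the project's real code
    def __init__(self, streamer, username):
        self.streamer = streamer
        self.username = username
        self.redis_loaded = False

    def queue_up_redis_calls(self, pipeline):
        # queue one read per field; nothing hits redis yet
        pipeline.hget('{}:users:last_seen'.format(self.streamer), self.username)
        pipeline.zscore('{}:users:num_lines'.format(self.streamer), self.username)

    def load_redis_data(self, data):
        # results arrive in the same order the calls were queued
        last_seen, num_lines = data
        self.last_seen = last_seen
        self.num_lines = int(num_lines or 0)
        self.redis_loaded = True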
Example 9
def upgrade():
    bind = op.get_bind()
    session = Session(bind=bind)

    config_data = RedisManager.get().config_get('maxmemory')
    max_memory = config_data['maxmemory']
    print('redis max memory: {}'.format(max_memory))
    RedisManager.get().config_set('maxmemory', str(int(max_memory) * 10))
    with RedisManager.pipeline_context() as pipeline:
        streamer = pb_config['main']['streamer']

        num_lines_key = '{streamer}:users:num_lines'.format(streamer=streamer)
        ignored_key = '{streamer}:users:ignored'.format(streamer=streamer)
        last_active_key = '{streamer}:users:last_active'.format(
            streamer=streamer)
        last_seen_key = '{streamer}:users:last_seen'.format(streamer=streamer)
        banned_key = '{streamer}:users:banned'.format(streamer=streamer)
        username_raw_key = '{streamer}:users:username_raw'.format(
            streamer=streamer)
        pipeline.delete(num_lines_key, ignored_key, last_active_key,
                        last_seen_key, banned_key, username_raw_key)

        for user in session.query(User):
            if user.num_lines > 0:
                pipeline.zadd(num_lines_key, user.username, user.num_lines)

            if user.ignored:
                pipeline.hset(ignored_key, user.username, 1)

            if user.banned:
                pipeline.hset(banned_key, user.username, 1)

            if user.username != user.username_raw:
                pipeline.hset(username_raw_key, user.username,
                              user.username_raw)

            if user._last_seen:
                pipeline.hset(last_seen_key, user.username,
                              user._last_seen.timestamp())

            if user._last_active:
                pipeline.hset(last_active_key, user.username,
                              user._last_active.timestamp())

    RedisManager.get().config_set('maxmemory', int(max_memory))

    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('tb_user') as batch_op:
        batch_op.drop_column('num_lines')
        batch_op.drop_column('ignored')
        batch_op.drop_column('last_active')
        batch_op.drop_column('last_seen')
        batch_op.drop_column('banned')
    ### end Alembic commands ###

    session.commit()
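Note that pipeline.zadd(num_lines_key, user.username, user.num_lines) is the redis-py 2.x calling convention; from redis-py 3.0 onward the members go in a mapping, so a port of this migration would write the same entry as:

pipeline.zadd(num_lines_key, {user.username: user.num_lines})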
Example 10
    def update_chatters_stage2(self, chatters):
        points = 1 if self.bot.is_online else 0

        log.debug('Updating {0} chatters'.format(len(chatters)))

        self.bot.stream_manager.update_chatters(chatters, self.update_chatters_interval)

        with RedisManager.pipeline_context() as pipeline:
            with DBManager.create_session_scope() as db_session:
                user_models = UserManager.get().bulk_load_user_models(chatters, db_session)
                users = []
                for username in chatters:
                    user_model = user_models.get(username, None)
                    user = UserManager.get().get_user(username, db_session=db_session, user_model=user_model, redis=pipeline)
                    users.append(user)

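                # NOTE: more_update_data is assembled below but never read again;
                # the SQL payload near the bottom recomputes the same values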
                more_update_data = {}
                if self.bot.is_online:
                    more_update_data['minutes_in_chat_online'] = self.update_chatters_interval
                else:
                    more_update_data['minutes_in_chat_offline'] = self.update_chatters_interval

                points_to_give_out = {}
                dt_now = datetime.datetime.now().timestamp()
                for user in users:
                    user._set_last_seen(dt_now)

                    num_points = points
                    if user.subscriber:
                        num_points *= 5
                    # TODO: Load user tags during the pipeline redis data fetch
                    if self.bot.streamer == 'forsenlol' and 'trumpsc_sub' in user.get_tags():
                        num_points *= 0.5

                    num_points = int(num_points)

                    if num_points not in points_to_give_out:
                        points_to_give_out[num_points] = []

                    points_to_give_out[num_points].append(user.username)

                    user.save(save_to_db=False)

                for num_points, usernames in points_to_give_out.items():
                    payload = {
                            User.points: User.points + num_points,
                            }
                    if self.bot.is_online:
                        payload[User.minutes_in_chat_online] = User.minutes_in_chat_online + self.update_chatters_interval
                    else:
                        payload[User.minutes_in_chat_offline] = User.minutes_in_chat_offline + self.update_chatters_interval
                    db_session.query(User).filter(User.username.in_(usernames)).\
                            update(payload, synchronize_session=False)

                pipeline.execute()
Example 11
    def update_channel_emotes(self):
        _channel_emotes = self.api.get_channel_emotes(
            StreamHelper.get_streamer())
        self.channel_emotes = {}
        for emote in _channel_emotes:
            self.channel_emotes[emote['code']] = emote['emote_hash']

        # Store channel emotes in redis
        streamer = StreamHelper.get_streamer()
        key = '{streamer}:emotes:bttv_channel_emotes'.format(streamer=streamer)
        with RedisManager.pipeline_context() as pipeline:
            for emote_code, emote_hash in self.channel_emotes.items():
                pipeline.hset(key, emote_code, emote_hash)
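Unlike the next example, this version never clears the hash first, so emote codes that were removed upstream linger in redis until the key is deleted by something else. The delete-then-refill variant keeps the whole refresh inside one pipeline (same key as above):

        with RedisManager.pipeline_context() as pipeline:
            pipeline.delete(key)  # drop stale entries before re-filling
            for emote_code, emote_hash in self.channel_emotes.items():
                pipeline.hset(key, emote_code, emote_hash)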
Example 12
    def update_channel_emotes(self):
        _channel_emotes = self.api.get_channel_emotes(StreamHelper.get_streamer())
        self.channel_emotes = {}
        for emote in _channel_emotes:
            self.channel_emotes[emote['code']] = emote['emote_hash']

        # Store channel emotes in redis
        streamer = StreamHelper.get_streamer()
        key = '{streamer}:emotes:bttv_channel_emotes'.format(streamer=streamer)
        with RedisManager.pipeline_context() as pipeline:
            pipeline.delete(key)
            for emote_code, emote_hash in self.channel_emotes.items():
                pipeline.hset(key, emote_code, emote_hash)
Example 13
def upgrade():
    bind = op.get_bind()
    session = Session(bind=bind)

    config_data = RedisManager.get().config_get('maxmemory')
    max_memory = config_data['maxmemory']
    print('redis max memory: {}'.format(max_memory))
    RedisManager.get().config_set('maxmemory', str(int(max_memory) * 10))
    with RedisManager.pipeline_context() as pipeline:
        streamer = pb_config['main']['streamer']

        num_lines_key = '{streamer}:users:num_lines'.format(streamer=streamer)
        ignored_key = '{streamer}:users:ignored'.format(streamer=streamer)
        last_active_key = '{streamer}:users:last_active'.format(streamer=streamer)
        last_seen_key = '{streamer}:users:last_seen'.format(streamer=streamer)
        banned_key = '{streamer}:users:banned'.format(streamer=streamer)
        username_raw_key = '{streamer}:users:username_raw'.format(streamer=streamer)
        pipeline.delete(num_lines_key, ignored_key, last_active_key, last_seen_key, banned_key, username_raw_key)

        for user in session.query(User):
            if user.num_lines > 0:
                pipeline.zadd(num_lines_key, user.username, user.num_lines)

            if user.ignored:
                pipeline.hset(ignored_key, user.username, 1)

            if user.banned:
                pipeline.hset(banned_key, user.username, 1)

            if user.username != user.username_raw:
                pipeline.hset(username_raw_key, user.username, user.username_raw)

            if user._last_seen:
                pipeline.hset(last_seen_key, user.username, user._last_seen.timestamp())

            if user._last_active:
                pipeline.hset(last_active_key, user.username, user._last_active.timestamp())

    RedisManager.get().config_set('maxmemory', int(max_memory))

    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('tb_user') as batch_op:
        batch_op.drop_column('num_lines')
        batch_op.drop_column('ignored')
        batch_op.drop_column('last_active')
        batch_op.drop_column('last_seen')
        batch_op.drop_column('banned')
    ### end Alembic commands ###

    session.commit()
Example 14
    def command_masspoints(self, **options):
        bot = options["bot"]
        source = options["source"]
        message = options["message"]

        if not message:
            return False

        pointsArgument = message.split(" ")[0]
        givePoints = 0

        try:
            givePoints = int(pointsArgument)
        except ValueError:
            bot.whisper(source.username_raw, "Error: You must give an integer")
            return False

        currentChatters = bot.twitch_tmi_api.get_chatters(bot.streamer)
        # guard against a failed fetch before calling len() on the result
        if not currentChatters:
            bot.say("Error fetching chatters")
            return False
        numUsers = len(currentChatters)

        with RedisManager.pipeline_context() as pipeline:
            with DBManager.create_session_scope() as db_session:
                userModels = UserManager.get().bulk_load_user_models(
                    currentChatters, db_session)
                for userName in currentChatters:
                    userModel = userModels.get(userName, None)
                    userInstance = UserManager.get().get_user(
                        userName,
                        db_session=db_session,
                        user_model=userModel,
                        redis=pipeline)

                    # Bot/idler check. Quicker than removing from list?
                    if userInstance.num_lines < 5:
                        numUsers -= 1
                        continue

                    if userInstance.subscriber:
                        userInstance.points += givePoints * self.settings[
                            "sub_points"]
                    else:
                        userInstance.points += givePoints

        bot.say("{} just gave {} viewers {} points each! Enjoy FeelsGoodMan".
                format(source.username_raw, numUsers, givePoints))
Example 15
    def update_emotes(self):
        base_url = 'https://twitchemotes.com/api_cache/v2/{0}.json'
        endpoints = [
            'global',
            'subscriber',
        ]

        twitch_emotes = {}
        twitch_subemotes = {}

        for endpoint in endpoints:
            log.debug('Refreshing {0} emotes...'.format(endpoint))

            try:
                data = requests.get(base_url.format(endpoint)).json()
            except ValueError:
                continue

            if 'channels' in data:
                for channel in data['channels']:
                    chan = data['channels'][channel]
                    # chan_id = chan['id']
                    emotes = chan['emotes']

                    emote_codes = []
                    pending_emotes = []

                    for emote in emotes:
                        emote_codes.append(emote['code'])
                        pending_emotes.append(emote)

                    prefix = os.path.commonprefix(emote_codes)
                    if len(prefix) > 1 and ''.join(
                            filter(lambda c: c.isalpha(), prefix)).islower():
                        for emote in pending_emotes:
                            twitch_emotes[emote['code']] = emote['image_id']
                            twitch_subemotes[emote['code']] = channel
            else:
                for code, emote_data in data['emotes'].items():
                    twitch_emotes[code] = emote_data['image_id']

        with RedisManager.pipeline_context() as pipeline:
            pipeline.delete('global:emotes:twitch_subemotes')
            pipeline.hmset('global:emotes:twitch', twitch_emotes)
            pipeline.hmset('global:emotes:twitch_subemotes', twitch_subemotes)

        self.subemotes = twitch_subemotes
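The commonprefix check is a heuristic for spotting subscriber-emote sets: sub emotes conventionally share a lowercase channel prefix. A worked micro-example with invented emote codes:

import os

codes = ['forsenE', 'forsenC', 'forsenD']
prefix = os.path.commonprefix(codes)  # -> 'forsen'

# accepted: shared prefix is longer than one character and all lowercase letters
print(len(prefix) > 1 and ''.join(filter(lambda c: c.isalpha(), prefix)).islower())  # True

# rejected: unrelated global emotes share no meaningful prefix
print(os.path.commonprefix(['Kappa', 'Keepo']))  # 'K', too short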
Example 16
def upgrade():
    bind = op.get_bind()
    session = Session(bind=bind)

    with RedisManager.pipeline_context() as pipeline:
        streamer = pb_config['main']['streamer']
        count_key = '{streamer}:emotes:count'.format(streamer=streamer)
        epmrecord_key = '{streamer}:emotes:epmrecord'.format(streamer=streamer)
        pipeline.delete(count_key, epmrecord_key)
        for emote in session.query(Emote):
            if emote.stats:
                pipeline.zincrby(count_key, emote.code, emote.stats.count)
                pipeline.zincrby(epmrecord_key, emote.code, emote.stats.tm_record)

    op.drop_table('tb_emote_stats')
    op.drop_table('tb_emote')

    session.commit()
Example 18
    def update_emotes(self):
        base_url = 'https://twitchemotes.com/api_cache/v2/{0}.json'
        endpoints = [
                'global',
                'subscriber',
                ]

        twitch_emotes = {}
        twitch_subemotes = {}

        for endpoint in endpoints:
            log.debug('Refreshing {0} emotes...'.format(endpoint))
            try:
                data = requests.get(base_url.format(endpoint)).json()
            except ValueError:
                # response body was not valid JSON; skip this endpoint
                continue

            if 'channels' in data:
                for channel in data['channels']:
                    chan = data['channels'][channel]
                    # chan_id = chan['id']
                    emotes = chan['emotes']

                    emote_codes = []
                    pending_emotes = []

                    for emote in emotes:
                        emote_codes.append(emote['code'])
                        pending_emotes.append(emote)

                    prefix = os.path.commonprefix(emote_codes)
                    if len(prefix) > 1 and ''.join(filter(lambda c: c.isalpha(), prefix)).islower():
                        for emote in pending_emotes:
                            twitch_emotes[emote['code']] = emote['image_id']
                            twitch_subemotes[emote['code']] = channel
            else:
                for code, emote_data in data['emotes'].items():
                    twitch_emotes[code] = emote_data['image_id']

        with RedisManager.pipeline_context() as pipeline:
            pipeline.delete('global:emotes:twitch_subemotes')
            pipeline.hmset('global:emotes:twitch', twitch_emotes)
            pipeline.hmset('global:emotes:twitch_subemotes', twitch_subemotes)

        self.subemotes = twitch_subemotes
Example 19
    def update_chatters_stage2(self, chatters):
        points = 1 if self.bot.is_online else 0

        log.debug('Updating {0} chatters'.format(len(chatters)))

        self.bot.stream_manager.update_chatters(chatters,
                                                self.update_chatters_interval)

        with RedisManager.pipeline_context() as pipeline:
            with DBManager.create_session_scope() as db_session:
                user_models = UserManager.get().bulk_load_user_models(
                    chatters, db_session)
                users = []
                for username in chatters:
                    user_model = user_models.get(username, None)
                    user = UserManager.get().get_user(username,
                                                      db_session=db_session,
                                                      user_model=user_model,
                                                      redis=pipeline)
                    users.append(user)

                more_update_data = {}
                if self.bot.is_online:
                    more_update_data['minutes_in_chat_online'] = self.update_chatters_interval
                else:
                    more_update_data['minutes_in_chat_offline'] = self.update_chatters_interval

                points_to_give_out = {}
                dt_now = datetime.datetime.now().timestamp()
                for user in users:
                    user._set_last_seen(dt_now)

                    num_points = points
                    if user.subscriber:
                        num_points *= 5
                    # TODO: Load user tags during the pipeline redis data fetch
                    if self.bot.streamer == 'forsenlol' and 'trumpsc_sub' in user.get_tags():
                        num_points *= 0.5

                    num_points = int(num_points)

                    if num_points not in points_to_give_out:
                        points_to_give_out[num_points] = []

                    points_to_give_out[num_points].append(user.username)

                    user.save(save_to_db=False)

                for num_points, usernames in points_to_give_out.items():
                    payload = {
                        User.points: User.points + num_points,
                    }
                    if self.bot.is_online:
                        payload[User.minutes_in_chat_online] = User.minutes_in_chat_online + self.update_chatters_interval
                    else:
                        payload[User.minutes_in_chat_offline] = User.minutes_in_chat_offline + self.update_chatters_interval
                    db_session.query(User).filter(User.username.in_(usernames)).\
                            update(payload, synchronize_session=False)

                pipeline.execute()
Example 20
def up(cursor, bot):
    redis = RedisManager.get()

    # new: twitch_id (will be renamed to "id" after the migration below...)
    # nullable for now. Will be NOT NULL later
    cursor.execute('ALTER TABLE "user" ADD COLUMN twitch_id TEXT')
    cursor.execute('CREATE UNIQUE INDEX ON "user"(twitch_id)')

    # username -> login
    cursor.execute('ALTER TABLE "user" RENAME COLUMN username TO login')
    cursor.execute("DROP INDEX user_username_idx")  # UNIQUE
    cursor.execute(
        'ALTER TABLE "user" DROP CONSTRAINT user_username_key')  # PRIMARY KEY
    cursor.execute(
        'CREATE INDEX ON "user"(login)')  # create a new index, non-unique

    # username_raw -> name
    cursor.execute('ALTER TABLE "user" RENAME COLUMN username_raw TO name')

    # level: add default
    cursor.execute('ALTER TABLE "user" ALTER COLUMN level SET DEFAULT 100')

    # points: add default
    cursor.execute('ALTER TABLE "user" ALTER COLUMN points SET DEFAULT 0')

    # subscriber: add default
    cursor.execute(
        'ALTER TABLE "user" ALTER COLUMN subscriber SET DEFAULT FALSE')

    # new: moderator
    cursor.execute(
        'ALTER TABLE "user" ADD COLUMN moderator BOOLEAN NOT NULL DEFAULT FALSE'
    )

    # new: num_lines
    cursor.execute(
        'ALTER TABLE "user" ADD COLUMN num_lines BIGINT NOT NULL DEFAULT 0')
    # same reason as in 0002_create_index_on_user_points.py
    cursor.execute('CREATE INDEX ON "user"(num_lines)')

    def safe_to_int(value):
        try:
            return int(value)
        except ValueError:
            return None

    # # threaded user id migration
    # import time
    # import random
    # import threading
    # import queue
    # log.info("start user id migration")
    # # migrate users to ID
    # cursor.execute('SELECT COUNT(*) FROM "user"')
    # users_count = cursor.fetchone()[0]

    # q = queue.Queue(500)
    # update_q = queue.Queue()

    # def update_rows(all_user_data):
    #     # log.info("updating rows")
    #     for id, basics in all_user_data:
    #         if basics is not None:
    #             try:
    #                 cursor.execute(
    #                     'UPDATE "user" SET twitch_id = %s, login = %s, name = %s WHERE id = %s' ,
    #                     (basics.id, basics.login, basics.name, id),
    #                 )
    #             except:
    #                 log.exception("Error in update rows")
    #                 log.info(f"XXX basics: {basics.login} - {basics.id}")
    #                 raise

    # class GetAndLockRows(threading.Thread):
    #     def __init__(self, group=None, target=None, name=None, args=(), kwargs=None, verbose=None):
    #         super(GetAndLockRows, self).__init__()
    #         self.target = target
    #         self.name = name

    #     def run(self):
    #         cursor.execute('DECLARE all_users CURSOR FOR SELECT id, login FROM "user" ORDER BY id FOR UPDATE')

    #         offset = 0

    #         while True:
    #             while not update_q.empty():
    #                 update_rows(update_q.get())

    #             while q.full():
    #                 while not update_q.empty():
    #                     update_rows(update_q.get())
    #                 log.info("helix api queue is full, waiting")
    #                 time.sleep(random.random() * 0.5)

    #             cursor.execute("FETCH FORWARD 100 FROM all_users")
    #             rows = cursor.fetchall()  # = [(id, login), (id, login), (id, login), ...]
    #             if len(rows) <= 0:
    #                 break

    #             offset += 100
    #             log.info(f"{offset}/{users_count}")
    #             q.put(rows)

    #             while not update_q.empty():
    #                 update_rows(update_q.get())

    #         cursor.execute("CLOSE all_users")

    #         log.info("Wait for q queue to fully empty")
    #         q.join()
    #         log.info("q queue is fully empty, process last users in update_q")

    #         while not update_q.empty():
    #             update_rows(update_q.get())

    #         log.info("done updating all rows")

    # class ConsumeRowsAndGetHelixData(threading.Thread):
    #     def __init__(self, group=None, target=None, name=None, args=(), kwargs=None, verbose=None):
    #         super(ConsumeRowsAndGetHelixData, self).__init__()
    #         self.target = target
    #         self.name = name
    #         self.running = True

    #     def run(self):
    #         while self.running:
    #             while not q.empty():
    #                 rows = q.get()
    #                 usernames_to_fetch = [t[1] for t in rows]
    #                 all_user_basics = retry_call(
    #                     bot.twitch_helix_api.bulk_get_user_basics_by_login, fargs=[usernames_to_fetch], tries=3, delay=5
    #                 )
    #                 update_q.put(zip((t[0] for t in rows), all_user_basics))
    #                 q.task_done()

    #             time.sleep(random.random() * 0.1)

    # if users_count > 0:
    #     get_and_lock_rows = GetAndLockRows(name="get_and_lock_rows")
    #     get_and_lock_rows.start()
    #     consume_rows_and_get_helix_data = ConsumeRowsAndGetHelixData(name="consume_rows_and_get_helix_data")
    #     consume_rows_and_get_helix_data.start()
    #     get_and_lock_rows.join()
    #     consume_rows_and_get_helix_data.running = False
    #     consume_rows_and_get_helix_data.join()

    # log.info("done with userid migration")

    # points: INT -> BIGINT
    log.info("change points to BIGINT")
    cursor.execute(
        'ALTER TABLE "user" ALTER COLUMN points SET DATA TYPE BIGINT')

    log.info("import lines from redis")
    for login, num_lines in redis.zscan_iter(
            f"{bot.streamer.login}:users:num_lines",
            score_cast_func=safe_to_int):
        if num_lines is None:
            # invalid amount in redis, skip
            continue
        cursor.execute('UPDATE "user" SET num_lines = %s WHERE login = %s',
                       (num_lines, login))

    # new: tokens
    cursor.execute(
        'ALTER TABLE "user" ADD COLUMN tokens INT NOT NULL DEFAULT 0')
    for login, tokens in redis.zscan_iter(f"{bot.streamer.login}:users:tokens",
                                          score_cast_func=safe_to_int):
        if tokens is None:
            # invalid amount in redis, skip
            continue
        if tokens > 50:
            tokens = 50
        cursor.execute('UPDATE "user" SET tokens = %s WHERE login = %s',
                       (tokens, login))

    # new: last_seen
    log.info("import last seen from redis")
    cursor.execute(
        'ALTER TABLE "user" ADD COLUMN last_seen TIMESTAMPTZ DEFAULT NULL')
    for login, last_seen_raw in redis.hscan_iter(
            f"{bot.streamer.login}:users:last_seen"):
        last_seen = datetime.datetime.fromtimestamp(float(last_seen_raw),
                                                    tz=datetime.timezone.utc)
        cursor.execute('UPDATE "user" SET last_seen = %s WHERE login = %s',
                       (last_seen, login))

    # new: last_active
    log.info("import last active from redis")
    cursor.execute(
        'ALTER TABLE "user" ADD COLUMN last_active TIMESTAMPTZ DEFAULT NULL')
    for login, last_active_raw in redis.hscan_iter(
            f"{bot.streamer.login}:users:last_active"):
        last_active = datetime.datetime.fromtimestamp(float(last_active_raw),
                                                      tz=datetime.timezone.utc)
        cursor.execute('UPDATE "user" SET last_active = %s WHERE login = %s',
                       (last_active, login))

    # minutes_in_chat_{online,offline} -> INTERVAL type and renamed to time_in_chat_...
    cursor.execute(
        'ALTER TABLE "user" RENAME COLUMN minutes_in_chat_online TO time_in_chat_online'
    )
    cursor.execute(
        'ALTER TABLE "user" RENAME COLUMN minutes_in_chat_offline TO time_in_chat_offline'
    )
    cursor.execute("""
    ALTER TABLE "user" ALTER COLUMN time_in_chat_online SET DATA TYPE INTERVAL
    USING make_interval(mins := time_in_chat_online)
    """)
    cursor.execute("""
    ALTER TABLE "user" ALTER COLUMN time_in_chat_offline SET DATA TYPE INTERVAL
    USING make_interval(mins := time_in_chat_offline)
    """)
    cursor.execute(
        "ALTER TABLE \"user\" ALTER COLUMN time_in_chat_online SET DEFAULT INTERVAL '0 minutes'"
    )
    cursor.execute(
        "ALTER TABLE \"user\" ALTER COLUMN time_in_chat_offline SET DEFAULT INTERVAL '0 minutes'"
    )

    # new: ignored
    cursor.execute(
        'ALTER TABLE "user" ADD COLUMN ignored BOOLEAN NOT NULL DEFAULT FALSE')
    for login in redis.hkeys(f"{bot.streamer.login}:users:ignored"):
        cursor.execute('UPDATE "user" SET ignored = TRUE WHERE login = %s',
                       (login, ))

    # new: banned
    cursor.execute(
        'ALTER TABLE "user" ADD COLUMN banned BOOLEAN NOT NULL DEFAULT FALSE')
    for login in redis.hkeys(f"{bot.streamer.login}:users:banned"):
        cursor.execute('UPDATE "user" SET banned = TRUE WHERE login = %s',
                       (login, ))

    # note: username_raw is not migrated in from redis, since the username_raw data will be fetched
    # fresh from the Twitch API below anyways.

    # migrate users to ID
    cursor.execute('SELECT COUNT(*) FROM "user"')
    users_count = cursor.fetchone()[0]

    # create Server-side cursor
    cursor.execute(
        'DECLARE all_users CURSOR FOR SELECT id, login FROM "user" ORDER BY id FOR UPDATE'
    )

    offset = 0
    while True:
        cursor.execute("FETCH FORWARD 100 FROM all_users")
        rows = cursor.fetchall()  # = [(id, login), (id, login), (id, login), ...]

        if len(rows) <= 0:
            # done!
            break

        offset += 100
        log.info(f"{offset}/{users_count}")

        usernames_to_fetch = [t[1] for t in rows]
        all_user_basics = retry_call(
            bot.twitch_helix_api.bulk_get_user_basics_by_login,
            fargs=[usernames_to_fetch],
            tries=3,
            delay=5)

        for id, basics in zip((t[0] for t in rows), all_user_basics):
            if basics is not None:
                cursor.execute(
                    'UPDATE "user" SET twitch_id = %s, login = %s, name = %s WHERE id = %s',
                    (basics.id, basics.login, basics.name, id),
                )

    # release the cursor again
    cursor.execute("CLOSE all_users")

    # update admin logs to primary-key by Twitch ID.
    admin_logs_key = f"{bot.streamer.login}:logs:admin"
    all_admin_logs = redis.lrange(admin_logs_key, 0, -1)
    # all_admin_logs and new_log_entries are in newest -> oldest order
    new_log_entries = []
    for idx, raw_log_entry in enumerate(all_admin_logs):
        log_entry = json.loads(raw_log_entry)
        old_internal_id = log_entry["user_id"]
        cursor.execute('SELECT twitch_id FROM "user" WHERE id = %s',
                       (old_internal_id, ))
        row = cursor.fetchone()
        if row is not None and row[0] is not None:
            log_entry["user_id"] = row[0]
        else:
            log_entry["user_id"] = None
        new_log_entries.append(log_entry)

    @contextmanager
    def also_move_pkey(table, column):
        cursor.execute(f"ALTER TABLE {table} DROP CONSTRAINT {table}_pkey")
        yield
        cursor.execute(f"ALTER TABLE {table} ADD PRIMARY KEY ({column})")

    def update_foreign_key(table, column, nullable=False):
        cursor.execute(
            f"ALTER TABLE {table} DROP CONSTRAINT {table}_{column}_fkey, ALTER COLUMN {column} SET DATA TYPE TEXT"
        )
        if not nullable:
            cursor.execute(
                f"ALTER TABLE {table} ALTER COLUMN {column} DROP NOT NULL")
        cursor.execute(
            f'UPDATE {table} T SET {column} = (SELECT twitch_id FROM "user" WHERE id = T.{column}::int)'
        )
        if not nullable:
            cursor.execute(f"DELETE FROM {table} WHERE {column} IS NULL")
            cursor.execute(
                f"ALTER TABLE {table} ALTER COLUMN {column} SET NOT NULL")

        if nullable:
            on_delete_action = "SET NULL"
        else:
            on_delete_action = "CASCADE"
        cursor.execute(
            f'ALTER TABLE {table} ADD FOREIGN KEY ({column}) REFERENCES "user"(twitch_id) ON DELETE {on_delete_action}'
        )

    update_foreign_key("banphrase_data", "added_by", nullable=True)
    update_foreign_key("banphrase_data", "edited_by", nullable=True)
    update_foreign_key("command_data", "added_by", nullable=True)
    update_foreign_key("command_data", "edited_by", nullable=True)
    update_foreign_key("hsbet_bet", "user_id")
    update_foreign_key("pleblist_song", "user_id", nullable=True)
    update_foreign_key("prediction_run_entry", "user_id")
    update_foreign_key("roulette", "user_id")
    with also_move_pkey("user_duel_stats", "user_id"):
        update_foreign_key("user_duel_stats", "user_id")

    # delete users that were not found. farewell...
    # the ON DELETE rules we set before will make these users disappear from other data structures too
    cursor.execute('DELETE FROM "user" WHERE twitch_id IS NULL')

    # drop the internal ID column
    cursor.execute('ALTER TABLE "user" DROP COLUMN id')

    # we can also now set the display name to be non-null
    # since we definitely eliminated any legacy rows that might be missing that value
    cursor.execute('ALTER TABLE "user" ALTER COLUMN name SET NOT NULL')

    # Rename the twitch_id to id, and make it primary key
    cursor.execute('ALTER TABLE "user" RENAME COLUMN twitch_id TO id')
    cursor.execute('ALTER TABLE "user" ALTER COLUMN id SET NOT NULL')
    cursor.execute('ALTER TABLE "user" ADD PRIMARY KEY(id)')

    def delete_foreign_key(table, column):
        cursor.execute(
            f"ALTER TABLE {table} DROP CONSTRAINT {table}_{column}_fkey")

    def add_foreign_key_again(table, column, nullable=False):
        if nullable:
            on_delete_action = "SET NULL"
        else:
            on_delete_action = "CASCADE"
        cursor.execute(
            f'ALTER TABLE {table} ADD FOREIGN KEY ({column}) REFERENCES "user"(id) ON DELETE {on_delete_action}'
        )

    # now this is special: We first had a users table with a primary key on the internal ID,
    # then added a UNIQUE INDEX on (twitch_id) so we could have foreign keys pointing
    # to the twitch_id (in update_foreign_key). We needed to definitely add those foreign keys back
    # so we can get the cascading effect from the DELETE FROM "user" statement.
    # Now we are left with two UNIQUE indexes indexing the same thing:
    # - "user_pkey" PRIMARY KEY, btree (id)
    # - "user_twitch_id_idx" UNIQUE, btree (id)
    # all those foreign keys we created earlier are all referring (depend) on user_twitch_id_idx.
    # If we want to eliminate user_twitch_id_idx, we have to drop all those foreign key constraints,
    # DROP the index, and then create them again to make them use the primary key index (as they should).
    # so this is what the following block does.
    delete_foreign_key("banphrase_data", "added_by")
    delete_foreign_key("banphrase_data", "edited_by")
    delete_foreign_key("command_data", "added_by")
    delete_foreign_key("command_data", "edited_by")
    delete_foreign_key("hsbet_bet", "user_id")
    delete_foreign_key("pleblist_song", "user_id")
    delete_foreign_key("prediction_run_entry", "user_id")
    delete_foreign_key("roulette", "user_id")
    delete_foreign_key("user_duel_stats", "user_id")

    cursor.execute("DROP INDEX user_twitch_id_idx")

    add_foreign_key_again("banphrase_data", "added_by", nullable=True)
    add_foreign_key_again("banphrase_data", "edited_by", nullable=True)
    add_foreign_key_again("command_data", "added_by", nullable=True)
    add_foreign_key_again("command_data", "edited_by", nullable=True)
    add_foreign_key_again("hsbet_bet", "user_id")
    add_foreign_key_again("pleblist_song", "user_id", nullable=True)
    add_foreign_key_again("prediction_run_entry", "user_id")
    add_foreign_key_again("roulette", "user_id")
    add_foreign_key_again("user_duel_stats", "user_id")

    # new: login_last_updated (+triggers)
    cursor.execute(
        'ALTER TABLE "user" ADD COLUMN login_last_updated TIMESTAMPTZ NOT NULL DEFAULT now()'
    )
    cursor.execute("""
    CREATE FUNCTION trigger_user_update_login_last_updated()
    RETURNS trigger AS
    $$
    BEGIN
        NEW.login_last_updated = now();
        RETURN NEW;
    END
    $$
    LANGUAGE plpgsql
    """)
    cursor.execute("""
    CREATE TRIGGER user_login_update
    AFTER UPDATE OF login ON "user"
    FOR EACH ROW EXECUTE PROCEDURE trigger_user_update_login_last_updated()
    """)

    with RedisManager.pipeline_context() as redis_pipeline:
        # Overwrite admin logs
        redis_pipeline.delete(admin_logs_key)
        if len(new_log_entries) > 0:
            redis_pipeline.rpush(
                admin_logs_key,
                *[json.dumps(entry) for entry in new_log_entries])

        # Delete data that was moved in
        redis_pipeline.delete(
            f"{bot.streamer.login}:users:num_lines",
            f"{bot.streamer.login}:users:tokens",
            f"{bot.streamer.login}:users:last_seen",
            f"{bot.streamer.login}:users:last_active",
            f"{bot.streamer.login}:users:username_raw",
            f"{bot.streamer.login}:users:ignored",
            f"{bot.streamer.login}:users:banned",
        )
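The redis-side reads in this migration go through the *scan_iter helpers, and score_cast_func is what makes safe_to_int work: redis-py applies it to each raw score, so a malformed score comes back as None instead of raising mid-migration. A standalone sketch (the key name is a stand-in):

import redis


def safe_to_int(value):
    try:
        return int(value)
    except ValueError:
        return None


r = redis.Redis(decode_responses=True)
# zscan_iter yields (member, score) pairs; the cast runs on each raw score
for login, num_lines in r.zscan_iter('somestreamer:users:num_lines',
                                     score_cast_func=safe_to_int):
    if num_lines is None:
        continue  # skip corrupt entries, exactly as the migration does
    print(login, num_lines)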
Example 21
    def parse_message_twitch_emotes(self, source, message, tag, whisper):
        message_emotes = []
        new_user_tags = []

        # Twitch Emotes
        if tag:
            emote_data = tag.split('/')
            for emote in emote_data:
                try:
                    emote_id, emote_occurrences = emote.split(':')
                    emote_indices = emote_occurrences.split(',')

                    # figure out how many times the emote occurred in the message
                    emote_count = len(emote_indices)

                    first_index, last_index = emote_indices[0].split('-')
                    first_index = int(first_index)
                    last_index = int(last_index)
                    emote_code = message[first_index:last_index + 1]
                    if emote_code[0] == ':':
                        emote_code = emote_code.upper()
                    message_emotes.append({
                        'code': emote_code,
                        'twitch_id': emote_id,
                        'start': first_index,
                        'end': last_index,
                        'count': emote_count,
                        })

                    sub = self.subemotes.get(emote_code, None)
                    if sub:
                        new_user_tags.append('{sub}_sub'.format(sub=sub))
                except Exception:
                    log.exception('Exception caught while splitting emote data')
                    log.error('Emote data: {}'.format(emote_data))
                    log.error('Message: {}'.format(message))

        # BTTV Emotes
        for emote in self.bttv_emote_manager.valid_emotes:
            num = 0
            start = -1
            end = -1
            for match in emote['regex'].finditer(message):
                num += 1
                if num == 1:
                    start = match.span()[0]
                    end = match.span()[1] - 1  # don't ask me
            if num > 0:
                message_emotes.append({
                    'code': emote['code'],
                    'bttv_hash': emote['emote_hash'],
                    'start': start,
                    'end': end,
                    'count': num,
                    })

        # FFZ Emotes
        for emote in self.ffz_emote_manager.valid_emotes:
            num = 0
            start = -1
            end = -1
            for match in emote['regex'].finditer(message):
                num += 1
                if num == 1:
                    start = match.span()[0]
                    end = match.span()[1] - 1  # don't ask me
            if num > 0:
                message_emotes.append({
                    'code': emote['code'],
                    'ffz_id': emote['emote_id'],
                    'start': start,
                    'end': end,
                    'count': num,
                    })

        if len(message_emotes) > 0 or len(new_user_tags) > 0:
            streamer = StreamHelper.get_streamer()
            with RedisManager.pipeline_context() as pipeline:
                if not whisper:
                    for emote in message_emotes:
                        pipeline.zincrby('{streamer}:emotes:count'.format(streamer=streamer), emote['code'], emote['count'])
                        self.epm_incr(emote['code'], emote['count'])

                user_tags = source.get_tags()

                for tag in new_user_tags:
                    user_tags[tag] = (datetime.datetime.now() + datetime.timedelta(days=15)).timestamp()
                source.set_tags(user_tags, redis=pipeline)

        return message_emotes
Example 22
    def parse_message_twitch_emotes(self, source, message, tag, whisper):
        message_emotes = []
        new_user_tags = []

        # Twitch Emotes
        if tag:
            emote_data = tag.split('/')
            for emote in emote_data:
                try:
                    emote_id, emote_occurrences = emote.split(':')
                    emote_indices = emote_occurrences.split(',')

                    # figure out how many times the emote occurred in the message
                    emote_count = len(emote_indices)

                    first_index, last_index = emote_indices[0].split('-')
                    first_index = int(first_index)
                    last_index = int(last_index)
                    emote_code = message[first_index:last_index + 1]
                    if emote_code[0] == ':':
                        emote_code = emote_code.upper()
                    message_emotes.append({
                        'code': emote_code,
                        'twitch_id': emote_id,
                        'start': first_index,
                        'end': last_index,
                        'count': emote_count,
                    })

                    sub = self.subemotes.get(emote_code, None)
                    if sub:
                        new_user_tags.append('{sub}_sub'.format(sub=sub))
                except Exception:
                    log.exception(
                        'Exception caught while splitting emote data')
                    log.error('Emote data: {}'.format(emote_data))
                    log.error('Message: {}'.format(message))

        # BTTV Emotes
        for emote in self.bttv_emote_manager.all_emotes:
            num = 0
            start = -1
            end = -1
            for match in emote['regex'].finditer(message):
                num += 1
                if num == 1:
                    start = match.span()[0]
                    end = match.span()[1] - 1  # don't ask me
            if num > 0:
                message_emotes.append({
                    'code': emote['code'],
                    'bttv_hash': emote['emote_hash'],
                    'start': start,
                    'end': end,
                    'count': num,
                })

        # FFZ Emotes
        for emote in self.ffz_emote_manager.all_emotes:
            num = 0
            start = -1
            end = -1
            for match in emote['regex'].finditer(message):
                num += 1
                if num == 1:
                    start = match.span()[0]
                    end = match.span()[1] - 1  # don't ask me
            if num > 0:
                message_emotes.append({
                    'code': emote['code'],
                    'ffz_id': emote['emote_id'],
                    'start': start,
                    'end': end,
                    'count': num,
                })

        if len(message_emotes) > 0 or len(new_user_tags) > 0:
            streamer = StreamHelper.get_streamer()
            with RedisManager.pipeline_context() as pipeline:
                if not whisper:
                    for emote in message_emotes:
                        pipeline.zincrby(
                            '{streamer}:emotes:count'.format(
                                streamer=streamer), emote['code'],
                            emote['count'])
                        self.epm_incr(emote['code'], emote['count'])

                user_tags = source.get_tags()

                for tag in new_user_tags:
                    user_tags[tag] = (datetime.datetime.now() +
                                      datetime.timedelta(days=15)).timestamp()
                source.set_tags(user_tags, redis=pipeline)

        return message_emotes
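To make the Twitch-emote branch concrete: the IRC emotes tag encodes id:start-end[,start-end...] groups separated by '/'. A worked example using real emote IDs (Kappa is 25, Keepo is 1902) and a made-up message:

message = 'Kappa Keepo Kappa'
tag = '25:0-4,12-16/1902:6-10'

for emote in tag.split('/'):
    emote_id, emote_occurrences = emote.split(':')
    emote_indices = emote_occurrences.split(',')
    first_index, last_index = (int(i) for i in emote_indices[0].split('-'))
    print(emote_id, message[first_index:last_index + 1], len(emote_indices))

# prints:
# 25 Kappa 2
# 1902 Keepo 1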