Example #1
def handle_delete_last_command(update: Update, context: CallbackContext):
    chat_id = update.message.chat_id
    queue_size = redis.get(f'chat:{chat_id}:queue_size')
    if queue_size is None:
        queue_size = 0
    queue_size = int(queue_size)
    if queue_size <= 0:
        context.bot.send_message(chat_id=chat_id, text='Queue is empty')
        return
    queue_size -= 1

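    # After the decrement, queue_size is the index of the last stored tweet; read it before deleting its keys.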
    tweet_text = redis.get(f'chat:{chat_id}:queue:{queue_size}:text') or ''
    tg_attachment_id = redis.get(
        f'chat:{chat_id}:queue:{queue_size}:tg_attachment_id')

    redis.delete(f'chat:{chat_id}:queue:{queue_size}:text')
    redis.delete(f'chat:{chat_id}:queue:{queue_size}:tg_attachment_id')

    redis.set(f'chat:{chat_id}:queue_size', queue_size)

    context.bot.send_message(
        chat_id=chat_id,
        text="I've deleted your latest tweet. This was the text: " +
        tweet_text)
    if tg_attachment_id:
        context.bot.send_message(chat_id=chat_id,
                                 text='It also had an attachment')
Example #2
    def get(self, me):
        """Who's pinterested in me?"""
        if 'unread' in request.values:
            # Reading the unread set is destructive: it is deleted once returned.
            key = 'users:%s:match:unread' % me
            unread = list(redis.smembers(key))
            redis.delete(key)
            return unread
        else:
            return list(redis.smembers('users:%s:match' % me))
Example #3
def resetRedisQueues():

    redis = common.redis.get_redis_queue_conn()

    # Since we restarted, clear the url queues.
    print("Flushing redis queues")
    dropped = 0
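    # scan_iter walks the keyspace incrementally instead of blocking the server the way KEYS would.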
    for key in redis.scan_iter(match="*"):
        redis.delete(key)
        dropped += 1
    print("Queues flushed. Deleted %s netlocs" % dropped)
Example #4
    def delete(self, user):
        """Delete a single user."""

        redis.srem('users', user)

        sets = ['sex_am', 'sex_want', 'zip', 'zipshort', 'birthday', 'birthyear']
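        # Remove the user from each attribute's reverse-lookup set (keyed as '<attribute>:<value>').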
        for key in sets:
            value = redis.get('users:%s:%s' % (user, key))
            if value is not None:
                redis.srem('%s:%s' % (key, value), user)

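        # Then drop the user's own per-attribute keys.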
        keys = ['sex_am', 'sex_want', 'ugos', 'zip', 'zipshort', 'birthday', 'birthyears']
        for key in keys:
            redis.delete('users:%s:%s' % (user, key))

        redis.zrem('birthyears', user)

        return Response()
Example #5
    def delete(self, user):
        """Delete a single user."""

        redis.srem('users', user)

        sets = [
            'sex_am', 'sex_want', 'zip', 'zipshort', 'birthday', 'birthyear'
        ]
        for key in sets:
            value = redis.get('users:%s:%s' % (user, key))
            if value is not None:
                redis.srem('%s:%s' % (key, value), user)

        keys = [
            'sex_am', 'sex_want', 'ugos', 'zip', 'zipshort', 'birthday',
            'birthyears'
        ]
        for key in keys:
            redis.delete('users:%s:%s' % (user, key))

        redis.zrem('birthyears', user)

        return Response()
Example #6
def loop():
    now = datetime.now()
    logging.debug(f'Running with timestamp {now}')
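    # Keys look like 'chat:<chat_id>:settings:tweet_time' and hold the desired posting time as 'HH:MM'.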
    for key in redis.keys('*:settings:tweet_time'):
        chat_id = key.split(':')[1]
        desired_time = redis.get(key).split(':')
        desired_hour = int(desired_time[0])
        desired_minute = int(desired_time[1])
        local_now = now.astimezone(timezone(redis.get(f'chat:{chat_id}:settings:timezone') or 'UTC'))
        if desired_hour != local_now.hour or desired_minute != local_now.minute:
            continue
        queue_size = redis.get(f'chat:{chat_id}:queue_size') or 0
        queue_size = int(queue_size)
        if queue_size <= 0:
            continue
        queue_size -= 1
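        # Tweets are posted from the head of the queue (index 0); the remaining entries are shifted forward afterwards.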
        tweet_text = redis.get(f'chat:{chat_id}:queue:0:text')
        tg_attachment_id = redis.get(f'chat:{chat_id}:queue:0:tg_attachment_id')

        twitter = get_twitter_api(chat_id)

        if not tg_attachment_id:
            try:
                status = twitter.update_status(tweet_text)
            except tweepy.error.TweepError as e:
                logging.warning(f'Unable to tweet for chat:{chat_id}:queue:{queue_size} (without attachment). '
                                f'Reason: {e.reason}')
                telegram_updater.bot.send_message(chat_id=chat_id, text=e.reason)
                telegram_updater.bot.send_message(chat_id=chat_id, text='Sorry, I was unable to post your daily tweet. '
                                                                        'This is your tweet:')
                telegram_updater.bot.send_message(chat_id=chat_id, text=tweet_text)
                telegram_updater.bot.send_message(chat_id=chat_id,
                                                  text='You may delete it from the queue: /delete_last')
                return
        else:
            # download telegram photo
            logging.debug('Downloading telegram attachment')
            file = telegram_updater.bot.getFile(tg_attachment_id)
            filename = FILE_STORAGE_PATH / tg_attachment_id
            file.download(filename)

            try:
                status = twitter.update_with_media(filename, tweet_text)
            except tweepy.error.TweepError as e:
                logging.warning(f'Unable to tweet for chat:{chat_id}:queue:{queue_size} (with attachment). '
                                f'Reason: {e.reason}')
                telegram_updater.bot.send_message(chat_id=chat_id, text=e.reason)
                telegram_updater.bot.send_message(chat_id=chat_id,
                                                  text='Sorry, I was unable to post your daily tweet. '
                                                       'This is your tweet, and it contained one attachment:')
                telegram_updater.bot.send_message(chat_id=chat_id, text=tweet_text)
                telegram_updater.bot.send_message(chat_id=chat_id,
                                                  text='You may delete it from the queue: /delete_last')
                return
            finally:
                filename.unlink(missing_ok=True)
        logging.debug('Deleting stored tweet and attachment id')
        # Move every remaining element in the queue forward by one position
        for i in range(queue_size):
            k0 = f'chat:{chat_id}:queue:{i}:text'
            k1 = f'chat:{chat_id}:queue:{i+1}:text'
            l0 = f'chat:{chat_id}:queue:{i}:tg_attachment_id'
            l1 = f'chat:{chat_id}:queue:{i + 1}:tg_attachment_id'
            redis.set(k0, redis.get(k1) or '')
            redis.set(l0, redis.get(l1) or '')
        # The last element is now duplicated; delete it
        redis.delete(f'chat:{chat_id}:queue:{queue_size}:text')
        redis.delete(f'chat:{chat_id}:queue:{queue_size}:tg_attachment_id')

        tweet_url = build_tweet_url(status)
        logging.info(f'Tweeted: {tweet_url} for chat_id {chat_id}')
        telegram_updater.bot.send_message(chat_id=chat_id, text=f'I just tweeted this: {tweet_url}\n'
                                                                f'\n'
                                                                f'Tweets in queue: {queue_size}')
        if queue_size <= 0:
            telegram_updater.bot.send_message(chat_id=chat_id, text="Your queue is now empty. I will not tweet "
                                                                    "tomorrow if you won't give me new stuff!")
        redis.set(f'chat:{chat_id}:queue_size', queue_size)
Example #7
def mq_clean():
    collections = [
        'stats_mq',
        'stats_batch_run',
        'stats_batch_stage',
        'mq_spider_undo',
        'mq_spider_ready',
        'mq_spider_doing',
        'mq_spider_done',
        'mq_mirror_undo',
        'mq_mirror_ready',
        'mq_mirror_doing',
        'mq_mirror_done',
        'mq_piping_undo',
        'mq_piping_ready',
        'mq_piping_doing',
        'mq_piping_done',
        'mq_notify_undo',
        'mq_notify_ready',
        'mq_notify_doing',
        'mq_notify_done',
        'mq_snapshot_undo',
        'mq_snapshot_ready',
        'mq_snapshot_doing',
        'mq_snapshot_done',
    ]
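    # Drop every stats and message-queue collection before re-seeding the counters below.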
    for key in collections:
        mongoMq[key].drop()

    for i in ['execute', 'spider', 'mirror', 'piping', 'notify', 'snapshot']:
        key = "mq_%s_ready" % i
        redis.delete(key)

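    # Re-seed the per-queue stats documents with zeroed counters.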
    mongoMq['stats_mq'].insert({
        'mq_key': 'execute',
        'undo': 0,
        'ready': 0,
        'doing': 0,
        'done': 0
    })
    mongoMq['stats_mq'].insert({
        'mq_key': 'spider',
        'undo': 0,
        'ready': 0,
        'doing': 0,
        'done': 0
    })
    mongoMq['stats_mq'].insert({
        'mq_key': 'mirror',
        'undo': 0,
        'ready': 0,
        'doing': 0,
        'done': 0
    })
    mongoMq['stats_mq'].insert({
        'mq_key': 'notify',
        'undo': 0,
        'ready': 0,
        'doing': 0,
        'done': 0
    })
    mongoMq['stats_mq'].insert({
        'mq_key': 'piping',
        'undo': 0,
        'ready': 0,
        'doing': 0,
        'done': 0
    })
Example #8
def clean_all():
    # Dangerous: this wipes every table, sequence, collection, and queue. Use with caution.
    # The early return below is kept as a safety guard so the destructive code never runs.
    return True
    #db.exec("TRUNCATE TABLE app CASCADE;")
    for table in [
            'task', 'task_execute', 'site', 'domain', 'setting', 'proxy',
            'task_notify', 'task_piping', 'dk_black_list', 'dk_white_list',
            'dk_filterword', 'task_piping_result', 'task_piping_snapshot',
            'spider_url', 'spiderjs_url', 'piping_extend', 'scheduler'
    ]:
        db.exec("delete from %s;" % table)

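    # Reset each primary-key sequence so new rows start from id 1.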
    for seq in [
            'app_id_seq', 'task_id_seq', 'task_execute_id_seq', 'site_id_seq',
            'domain_id_seq', 'setting_id_seq', 'dk_black_list_id_seq',
            'dk_filterword_id_seq', 'dk_white_list_id_seq', 'proxy_id_seq',
            'task_notify_id_seq', 'task_piping_id_seq',
            'task_piping_result_id_seq', 'task_piping_snapshot_id_seq',
            'spider_url_id_seq', 'piping_extend_id_seq'
    ]:
        db.exec("ALTER SEQUENCE %s RESTART WITH 1;" % seq)

    itemSpider = {
        'spiderurl': {
            'unique': ['id'],
            'index': ['task_id', 'execute_id']
        },
        'execute': {
            'unique': ['id'],
            'index': ['task_id']
        },
        'static': {
            'unique': ['domain', 'md5_body'],
            'index': ['domain', 'md5_url', 'md5_body']
        },
        'autoids': {
            'unique': ['name'],
            'index': []
        },
        'parse': {
            'unique': [],
            'index': ['task_id', 'execute_id', 'md5_url']
        },
        'snapshot': {
            'unique': ['id'],
            'index': ['app_key', 'batch']
        },
        'outlink': {
            'unique': [],
            'index': ['domain', 'md5_referer', 'md5_url', 'md5_body', 'date']
        },
    }
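    # Recreate each spider collection with its unique and secondary indexes.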
    for key, item in itemSpider.items():
        mongoSpider[key].drop()
        if item['unique']:
            uniqueField = [(field, 1) for field in item['unique']]
            mongoSpider[key].ensure_index(uniqueField, unique=True)
        if item['index']:
            for field in item['index']:
                mongoSpider[key].ensure_index(field)

    itemMq = {
        'mq_spider_undo': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_spider_ready': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_spider_doing': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_spider_done': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_mirror_undo': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_mirror_ready': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_mirror_doing': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_mirror_done': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_piping_undo': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_piping_ready': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_piping_doing': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_piping_done': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_notify_undo': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_notify_ready': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_notify_doing': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_notify_done': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_snapshot_undo': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_snapshot_ready': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_snapshot_doing': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'mq_snapshot_done': {
            'unique': ['mq_id'],
            'index': ['mq_batch']
        },
        'stats_mq': {
            'unique': ['mqkey'],
            'index': []
        },
        'stats_batch_run': {
            'unique': ['mqkey', 'batch'],
            'index': []
        },
        'stats_batch_stage': {
            'unique': ['mqkey', 'batch'],
            'index': []
        },
        'process_list': {
            'unique': ['hostname', 'title'],
            'index': ['mqkey', 'status']
        },
        # 'process_config':    {'unique':['hostname'],          'index':[]},
    }
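    # Recreate the message-queue collections with the same unique/secondary index layout.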
    for key, item in itemMq.items():
        mongoMq[key].drop()
        if item['unique']:
            uniqueField = [(field, 1) for field in item['unique']]
            mongoMq[key].ensure_index(uniqueField, unique=True)
        if item['index']:
            for field in item['index']:
                mongoMq[key].ensure_index(field)

    for key in ['spider', 'mirror', 'piping', 'notify', 'snapshot']:
        redis.delete('mq_%s_ready' % key)