Example #1
from math import isinf
from time import time
from typing import Optional, Tuple, Union

from redis import Redis

# zpoptimed() and the module-level _BZPOPTIMED_CACHE set are defined earlier
# in the same module as this excerpt.
def bzpoptimed(
        r: Redis,
        key: str,
        *,
        timeout: Union[float, int] = 0) -> Optional[Tuple[bytes, float]]:
    # pylint: disable=function-redefined,missing-docstring,unused-argument; overload
    """Pop the earliest member (with score) in the sorted set at *key* as soon as it is available.

    Blocking variant of :meth:`zpoptimed`. *timeout* is the maximum number of seconds to block
    before returning ``None``, with ``0`` blocking indefinitely.
    """
    if r not in _BZPOPTIMED_CACHE:
        # Enable keyspace notifications for sorted-set commands
        # ('K' = keyspace channel, 'z' = zset commands).
        flags = set(r.config_get()['notify-keyspace-events']) | set('Kz')
        r.config_set('notify-keyspace-events', ''.join(flags))
        _BZPOPTIMED_CACHE.add(r)

    deadline = time() + timeout if timeout else float('inf')
    p = r.pubsub()
    p.subscribe('__keyspace@{}__:{}'.format(
        r.connection_pool.connection_kwargs['db'], key))
    while True:
        result = zpoptimed(r, key)
        if isinstance(result, tuple):
            p.close()
            return result
        now = time()
        if now >= deadline:
            p.close()
            return None
        sleep = max(min(result, deadline) - now, 0)
        # get_message() doesn't provide a way to block indefinitely, so just sleep very long
        p.get_message(timeout=60 * 60 if isinf(sleep) else sleep)
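A minimal usage sketch (the key name and timeout are illustrative; zpoptimed() and bzpoptimed() come from the excerpt above):

r = Redis()
item = bzpoptimed(r, 'schedule', timeout=5)
if item is not None:
    member, score = item  # earliest member and its score (the due time)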
Example #2
import json
import time
from distutils.version import StrictVersion

import gevent
from redis import Redis

# `app` (the Flask application) and `redis_pool` are defined elsewhere in the
# source project.
class RedisStream(object):
    def __init__(self):
        self.redis = Redis(connection_pool=redis_pool)
        actual_version = StrictVersion(self.redis.info()["redis_version"])
        minimum_version = StrictVersion("2.8.0")
        if actual_version < minimum_version:
            raise NotImplementedError(
                "keyspace notifications require Redis >= 2.8.0")
        self.redis.config_set("notify-keyspace-events", "Ks")
        self.pubsub = self.redis.pubsub()
        self.pubsub.psubscribe("__keyspace@0__:*:*.*")
        # TODO: make redis db number (this ^) configurable
        self.clients = list()

    def _generator(self):
        for message in self.pubsub.listen():
            if message["type"] == "pmessage":
                app.logger.debug("Message received from Redis, building data packet.")
                minion_id = message["channel"].split(":")[1]
                function = message["channel"].split(":")[2]
                jid = self.redis.lindex("{0}:{1}".format(minion_id, function), 0)
                success = (
                    True if json.loads(self.redis.get("{0}:{1}".format(minion_id, jid))).get("retcode") == 0 else False
                )
                try:
                    timestamp = time.strptime(jid, "%Y%m%d%H%M%S%f")
                except ValueError:
                    continue  # do not pass on info with faked jids
                yield dict(
                    minion_id=minion_id,
                    function=function,
                    jid=jid,
                    success=success,
                    time=time.strftime("%Y-%m-%d, at %H:%M:%S", timestamp),
                )

    def register(self, client, function):
        self.clients.append((client, function))
        app.logger.debug("Client %s (function %s) registered." % (client, function))

    def send_or_discard_connection(self, client_tupl, data):
        client, function = client_tupl
        try:
            client.send(json.dumps(data))
            app.logger.debug("Data for jid %s sent to %s (function %s)" % (data["jid"], client, function))
        except Exception as e:  # TODO: this is either a ValueError from json, or some other exception from gevents websocket stuff
            self.clients.remove(client_tupl)
            app.logger.debug("%s (function %s) removed with reason: %s" % (client, function, e))

    def run(self):
        for data in self._generator():
            sent = 0
            for client, function in self.clients:
                if data["function"] == function:
                    gevent.spawn(self.send_or_discard_connection, (client, function), data)
                    sent += 1
            app.logger.debug("Attempted to send data packet to %s of %s clients." % (sent, len(self.clients)))

    def start(self):
        gevent.spawn(self.run)
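A wiring sketch under gevent (ws_client is a hypothetical websocket connection object):

stream = RedisStream()
stream.start()  # spawns run() on a greenlet
# for each connected websocket interested in a salt function:
# stream.register(ws_client, 'test.ping')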
Example #4
from redis import Redis
from redis.exceptions import ResponseError

# `options` comes from the command-line parser defined elsewhere in this script.
def _runtests(host, port, password=None):
    from django.test.simple import DjangoTestSuiteRunner
    client = Redis(host, port)
    try:
        client.config_set('requirepass', password)
    except ResponseError:
        client = Redis(host, port, password=password)
        client.config_set('requirepass', password)
    runner = DjangoTestSuiteRunner(verbosity=options.verbosity,
                                   interactive=True,
                                   failfast=False)
    failures = runner.run_tests(['testapp'])
    return failures
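Note that django.test.simple.DjangoTestSuiteRunner was deprecated in Django 1.6 and removed in 1.8; on modern Django the equivalent call is roughly:

from django.test.runner import DiscoverRunner

runner = DiscoverRunner(verbosity=1, interactive=True, failfast=False)
failures = runner.run_tests(['testapp'])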
Example #6
from redis import Redis

# The `redis` argument is a test-server fixture whose .dsn() returns
# connection kwargs.
def handler(redis):
    r = Redis(**redis.dsn())
    # Snapshot to disk when at least 1 key changed within 900 seconds.
    r.config_set('save', '900 1')
    r.set('scott', '1')
    r.set('tiger', '2')
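The save directive takes <seconds> <changes> pairs, so '900 1' asks Redis to snapshot when at least one key changed within 900 seconds. A quick sanity check against the same server:

r = Redis(**redis.dsn())
assert r.config_get('save')['save'] == '900 1'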
Example #7
import time

from flask import Flask
from redis import Redis
# BaseNamespace and BroadcastMixin come from the gevent-socketio package.
from socketio.namespace import BaseNamespace
from socketio.mixins import BroadcastMixin

app = Flask(__name__, static_folder='dependencies')
app.config['DEBUG'] = True

app.config['REDIS_HOST'] = '10.0.0.4'
app.config['REDIS_PORT'] = 6379
app.config['REDIS_DB'] = 0
app.config['REDIS_PASS'] = None

redis = Redis(
        host=app.config['REDIS_HOST'],
        port=app.config['REDIS_PORT'],
        db=app.config['REDIS_DB'],
        password=app.config['REDIS_PASS'])
redis.config_set('notify-keyspace-events', 'Kls')

class RedisStream(BaseNamespace, BroadcastMixin):
    def redis_emitter(self, channel, subscription):
        # NOTE: this opens a new default-localhost connection instead of
        # reusing the module-level `redis` client configured above.
        pubsub = Redis().pubsub()
        pubsub.subscribe(subscription)
        for message in pubsub.listen():
            if message['type'] == 'message':
                minion_id = message['channel'].split(':')[1]
                function = message['channel'].split(':')[2]
                jid = redis.lindex('{0}:{1}'.format(minion_id, function), 0)
                timestamp = time.strptime(jid, "%Y%m%d%H%M%S%f")
                self.emit(channel, dict(minion_id=minion_id, jid=jid, time=time.strftime('%Y-%m-%d, at %H:%M:%S', timestamp)))

    def on_subscribe_function(self, function):
        redis_channels = ["__keyspace@0__:{0}:{1}".format(minion, function) for minion in redis.smembers('minions')]
Example #8
from redis import Redis

redis_connection = Redis(decode_responses=True, db=0)

# 'E' enables keyevent notifications; '$' selects string commands (SET etc.).
redis_connection.config_set('notify-keyspace-events', 'E$')

pub_sub = redis_connection.pubsub()
pub_sub.subscribe('__keyevent@0__:set')

for message in pub_sub.listen():
    print(message)
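With decode_responses=True the loop first yields the subscribe confirmation, then one message per SET with the key name as the payload, e.g.:

{'type': 'subscribe', 'pattern': None, 'channel': '__keyevent@0__:set', 'data': 1}
{'type': 'message', 'pattern': None, 'channel': '__keyevent@0__:set', 'data': 'some_key'}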
Example #9
import logging

from redis import Redis

logger = logging.getLogger(__name__)

# get_hostname() is a helper defined elsewhere in the source project.
class RedisBase:
    def __init__(self, node=None, port=None):
        self.port = port
        self.node = node
        self.db = '/var/sockets/redis/redis.sock'

        if node and port:
            self.redis = Redis(host=node,
                               port=port,
                               socket_connect_timeout=1.0)
        elif node:
            self.redis = Redis(host=node, socket_connect_timeout=1.0)
        else:
            self.redis = Redis(unix_socket_path=self.db,
                               socket_connect_timeout=1.0)

    def get_master(self, node=None):
        """Return the master node of the redis cluster, or query the given node.
        :return: master hostname/IP; None on connection error; False if the
                 role could not be determined
        """

        try:
            if node:
                redis = Redis(host=node, socket_connect_timeout=1.0)
            else:
                redis = self.redis
            redis_info = redis.info()
            if redis_info.get('role') == 'master':
                if node:
                    return node
                else:
                    return get_hostname()
            elif redis_info.get('role') == 'slave':
                return redis_info.get('master_host')
        except Exception as e:
            logger.error("RedisBase::get_master: {}".format(str(e)))
            return None

        return False

    def slave_of(self, node, port):
        """
        Make the current redis node a slave of the specified one
        :param node: IP address of an existing master node
        :param port: TCP port of the master node
        :return:
        """
        return self.redis.slaveof(node, port)

    def config_set(self, key, value):
        return self.redis.config_set(key, value)

    def config_rewrite(self):
        return self.redis.config_rewrite()

    def sentinel_monitor(self):
        """
        Dynamically configure sentinel to monitor the local redis node.
         WARNING: FOr sentinel to work properly, self.node is supposed to be an IP address)
        :param node: IP address of an existing node
        :return: False if we are not connected to sentinel
        """

        if not self.node or not self.port or self.port != 26379:
            return False

        return self.redis.sentinel_monitor('mymaster', self.node, 6379, 1)
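A usage sketch (addresses illustrative; get_hostname() is the project helper referenced above):

rb = RedisBase()               # connect over the local unix socket
master = rb.get_master()       # hostname/IP, None on error, False if unknown
if master and master != get_hostname():
    rb.slave_of(master, 6379)  # re-attach this node as a replica of the master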
Example #11
from redis import Redis

redis_connection = Redis(decode_responses=True, db=0)

# 'K' enables keyspace notifications; '$' selects string commands (SET etc.).
redis_connection.config_set("notify-keyspace-events", "K$")

pub_sub = redis_connection.pubsub()
pub_sub.subscribe("__keyspace@0__:test_key")

for message in pub_sub.listen():
    print(message)
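This is the keyspace-side complement of the keyevent subscription in Example #8: with 'K$' the channel names the key and the payload names the event, so after redis_connection.set('test_key', 1) the loop prints something like:

{'type': 'message', 'pattern': None, 'channel': '__keyspace@0__:test_key', 'data': 'set'}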
Example #12
import gc
from datetime import datetime, timedelta

import h5py
import numpy as np
from redis import Redis
from scipy.stats import pearsonr

# `logger` and the @log_duration decorator are defined elsewhere in the
# source project.
class BangumiAnalyzer:
    def __init__(self, db, conf) -> None:
        self.conf = conf
        self.db = db
        self.redis = Redis(self.conf.REDIS_HOST,
                           self.conf.REDIS_PORT,
                           db=self.conf.REDIS_DATABASE,
                           password=self.conf.REDIS_PASSWORD)
        self.redis.config_set('maxmemory', self.conf.REDIS_MAX_MEMORY)
        self.redis.config_set('maxmemory-policy', 'allkeys-lru')

    @log_duration
    def get_animes_authors_refs_matrix(self):
        mat, media_ids, mids = None, None, None
        try:
            with h5py.File(self.conf.HDF5_FILENAME, 'r') as f:
                last_update = datetime.strptime(f.attrs['last_update'],
                                                '%Y-%m-%d %H:%M:%S.%f')
                if last_update > datetime.now() - timedelta(
                        hours=self.conf.HDF5_DATA_SET_TTL):
                    mat = np.array(f['animes_authors_refs_matrix'])
                    media_ids = np.array(f['media_ids'])
                    mids = np.array(f['mids'])
                else:
                    raise ValueError('Data Set Expired.')
        except (OSError, KeyError, ValueError) as e:
            logger.warning(
                'Data Set in HDF5 File Will Not be Used for Ref Matrix Because %s.'
                % e)

        if mat is None or media_ids is None or mids is None:
            media_ids, media_id_indexes, cur = [], {}, 0
            for entrance in self.db.get_all_entrances():
                media_ids.append(entrance['media_id'])
                media_id_indexes[str(entrance['media_id'])] = cur
                cur += 1

            authors_count = self.db.get_authors_count()
            mat = np.zeros((authors_count, len(media_ids)), dtype='int8')

            mids = []
            cur = 0
            for mid, reviews, _ in self.db.get_valid_author_ratings_follow_pairs():
                mids.append(mid)
                for review in reviews:
                    index = str(review['media_id'])
                    mat[cur, media_id_indexes[index]] = review['score']
                cur += 1

            with h5py.File(self.conf.HDF5_FILENAME, 'w') as f:
                f.create_dataset('animes_authors_refs_matrix', data=mat)
                f.create_dataset('media_ids', data=media_ids)
                f.create_dataset('mids', data=mids)
        return mat, media_ids, mids

    @staticmethod
    def asscalar(value):
        # Convert NumPy scalars to native Python numbers; pass ints and floats
        # through unchanged (np.asscalar() is deprecated in newer NumPy).
        return np.asscalar(value) if not isinstance(value, (int, float)) else value

    @staticmethod
    def calc_similarity(lhs, rhs):
        # Require at least two co-rated entries before computing a Pearson
        # correlation; the correlation itself is taken over the full vectors.
        index = np.logical_and(lhs > 0, rhs > 0)
        lhs_shared, rhs_shared = lhs[index], rhs[index]
        return pearsonr(lhs, rhs)[0] if len(lhs_shared) > 1 else -1

    @log_duration
    def get_similarity_matrix(self, refs_matrix, dset):
        mat = None
        try:
            with h5py.File(self.conf.HDF5_FILENAME, 'r') as f:
                mat = np.array(f[dset])
        except (OSError, KeyError) as e:
            logger.warning(
                'Data Set in HDF5 File Will Not be Used for Similarity Matrix Because %s.'
                % e)

        if mat is None:
            _, cols_count = refs_matrix.shape
            mat = np.zeros((cols_count, cols_count))
            for i in range(0, cols_count):
                logger.info('Calculating Similarities %s/%s...' %
                            (i, cols_count))
                for j in range(i + 1, cols_count):
                    mat[i, j] = BangumiAnalyzer.calc_similarity(
                        refs_matrix[:, i], refs_matrix[:, j])
            mat += mat.T
            np.fill_diagonal(mat, -1)

            with h5py.File(self.conf.HDF5_FILENAME, 'r+') as f:
                f.create_dataset(dset, data=mat)
                f.attrs['last_update'] = str(datetime.now())
        return mat

    @log_duration
    def process_animes_top_matches(self, ref_mat, media_ids) -> None:
        logger.info('Calculating Animes Similarity Matrix...')
        animes_sim_mat = self.get_similarity_matrix(
            ref_mat, 'animes_similarity_matrix')
        logger.info('Animes Similarity Matrix %s Calculated.' %
                    str(animes_sim_mat.shape))
        animes_sim_indexes_mat = np.flip(
            animes_sim_mat.argsort()
            [:, 0 - self.conf.ANALYZE_ANIME_TOP_MATCHES_SIZE:],
            axis=1)
        logger.info('Animes Sim-Indexes %s Computed.' %
                    str(animes_sim_indexes_mat.shape))

        cur = 0
        for anime_sim_indexes in animes_sim_indexes_mat:
            self.db.update_anime_top_matches(
                self.asscalar(media_ids[cur]),
                [{
                    'media_id': self.asscalar(media_ids[index]),
                    'similarity': self.asscalar(animes_sim_mat[cur, index])
                } for index in anime_sim_indexes])
            cur += 1
        logger.info('Animes Top-Matches Persisted.')

    def process_author_recommendation(self, total_scores_with_weight,
                                      total_weight, mid, media_ids,
                                      top_matches):
        recommendation = []
        recommend_indexes_sorted = np.flip(
            (total_scores_with_weight / total_weight).argsort(), axis=0)
        author_watched_media_ids = self.db.get_author_watched_media_ids(
            self.asscalar(mid))
        for index in recommend_indexes_sorted:
            if len(recommendation) == self.conf.ANALYZE_AUTHOR_RECOMMENDATION_SIZE:
                break
            if media_ids[index] not in author_watched_media_ids:
                recommendation.append(self.asscalar(media_ids[index]))

        self.db.update_author_recommendation(self.asscalar(mid), top_matches,
                                             recommendation)

    @log_duration
    def process_authors_recommendation(self, ref_mat, media_ids, mids) -> None:
        logger.info('Calculating Animes Similarities...')
        try:
            authors_sim_mat = self.get_similarity_matrix(
                ref_mat.T, 'authors_similarity_matrix')
            logger.info(
                'Authors Similarity Matrix %s Calculated Using Numpy.' %
                str(authors_sim_mat.shape))
            authors_sim_indexes_mat = np.flip(
                authors_sim_mat.argsort()
                [:, 0 - self.conf.ANALYZE_AUTHOR_TOP_MATCHES_SIZE:],
                axis=1)
            logger.info('Authors Sim-Indexes %s Computed.' %
                        str(authors_sim_indexes_mat.shape))

            for i in range(0, len(authors_sim_indexes_mat)):
                # Reset the accumulators for each author; initializing them
                # once outside the loop would leak totals across authors.
                top_matches = []
                total_scores_with_weight, total_weight = 0, 0
                for index in authors_sim_indexes_mat[i]:
                    if i != index:
                        similarity = self.asscalar(authors_sim_mat[i, index])
                        top_matches.append({
                            'mid': self.asscalar(mids[index]),
                            'similarity': similarity
                        })
                        total_scores_with_weight += similarity * ref_mat[index]
                        total_weight += similarity
                self.process_author_recommendation(total_scores_with_weight,
                                                   total_weight, mids[i],
                                                   media_ids, top_matches)
        except MemoryError:
            logger.warning(
                'Memory Error Caught, Using Redis as Cache to Calculate Similarities.'
            )
            for i in range(0, len(mids)):
                if self.db.is_need_re_calculate(self.asscalar(mids[i])):
                    logger.info(
                        "[%s/%s] Calculating %s's Top-Matches and Recommendation..."
                        % (i, len(mids), mids[i]))
                    similarities = np.empty((len(mids), ))
                    similarities[i] = -2
                    for j in range(0, len(mids)):
                        if i != j:
                            index_pair = '%s:%s' % (mids[min(i, j)],
                                                    mids[max(i, j)])
                            similarity = self.redis.get(index_pair)
                            if similarity is None:
                                similarity = self.calc_similarity(
                                    ref_mat[i], ref_mat[j])
                                self.redis.set(index_pair, similarity)
                                self.redis.expire(
                                    index_pair, self.conf.REDIS_SIMILARITY_TTL)
                            else:
                                # GET returns bytes; convert back to a float
                                # before storing it in the NumPy array.
                                similarity = float(similarity)
                            similarities[j] = similarity
                    # Slice the top-N before flipping; flipping first and then
                    # taking the tail would keep the lowest similarities.
                    sorted_indexes = np.flip(
                        similarities.argsort()
                        [0 - self.conf.ANALYZE_AUTHOR_TOP_MATCHES_SIZE:],
                        axis=0)

                    top_matches = []
                    total_scores_with_weight, total_weight = 0, 0
                    for index in sorted_indexes:
                        if i != index:
                            similarity = self.asscalar(similarities[index])
                            top_matches.append({
                                'mid': self.asscalar(mids[index]),
                                'similarity': similarity
                            })
                            total_scores_with_weight += similarity * ref_mat[index]
                            total_weight += similarity
                    self.process_author_recommendation(
                        total_scores_with_weight, total_weight, mids[i],
                        media_ids, top_matches)
                else:
                    logger.info('[%s/%s] Skip Calculating %s.' %
                                (i, len(mids), mids[i]))
        logger.info('Authors Top-Matches Persisted.')

    def analyze(self) -> None:
        logger.info('New Analyze Beginning...')

        logger.info('Getting Ref Matrix...')
        ref_mat, media_ids, mids = self.get_animes_authors_refs_matrix()
        logger.info('Ref Matrix %s Loaded, with %s Media Items and %s Authors.' %
                    (ref_mat.shape, len(media_ids), len(mids)))

        self.process_animes_top_matches(ref_mat, media_ids)
        self.process_authors_recommendation(ref_mat, media_ids, mids)

        logger.info('Analyzing Tasks Finished.')
        gc.collect()
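np.asscalar() was deprecated in NumPy 1.16 and removed in 1.23; a drop-in replacement for the helper above on current NumPy:

@staticmethod
def asscalar(value):
    # .item() converts NumPy scalar types to native Python numbers
    return value if isinstance(value, (int, float)) else value.item()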
Example #13
import sqlite3
from redis import Redis
from conn import SQLITE_DB_URI

client = Redis()
cf = 'stop-writes-on-bgsave-error'
if client.config_get(cf).get(cf) == 'yes':
    client.config_set(cf, 'no')


class RedisClient():
    def table(self, table):
        """
        # 切换表
        :param table:
        :return:
        """

        self.__table = table
        return self

    def pop(self, left=False):
        """
        从指定方向弹出一条
        :param left:
        :return:
        """
        if left: row = client.lpop(self.__table)
        else: row = client.rpop(self.__table)
        if row: return row.decode('utf-8')
        return None
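A usage sketch (the table name is illustrative):

proxies = RedisClient().table('proxies')
row = proxies.pop(left=True)  # LPOP; returns None when the list is empty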
Example #14
from random import randint

from redis import Redis

redis = Redis()
cf = 'stop-writes-on-bgsave-error'
if redis.config_get(cf).get(cf) == 'yes':
    redis.config_set(cf, 'no')

table = 'proxies'


class RedisClient():
    def pop(self, left=False):
        """
        从指定方向弹出一条
        :param left:
        :return:
        """
        if left: row = redis.lpop(table)
        else: row = redis.rpop(table)
        if row: return row.decode('utf-8')
        return None

    def put(self, row, left=True):
        """
        从指定方向添加一条数据
        :param proxy:
        :param left:
        :return:
        """
        if left: return redis.lpush(table, row)
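A usage sketch: put() pushes on the left and the default pop() pops on the right, so the list behaves as a FIFO queue:

client = RedisClient()
client.put('127.0.0.1:8080')  # LPUSH onto the proxies list
print(client.pop())           # RPOP from the other end -> '127.0.0.1:8080'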