Example #1
class RedisDataStore(DataStore):
    """Redis-backed datastore object."""

    def __init__(self, number=0):
        redis_host = os.environ.get('REDIS_PORT_6379_TCP_ADDR')
        redis_port = os.environ.get('REDIS_PORT_6379_TCP_PORT')
        self.redis_conn = StrictRedis(host=redis_host, port=redis_port,
                                      db=number)

    def __setitem__(self, k, v):
        self.redis_conn.set(k, v)

    def __getitem__(self, k):
        return self.redis_conn.get(k)

    def __delitem__(self, k):
        self.redis_conn.delete(k)

    def get(self, k):
        return self.redis_conn.get(k)

    def __contains__(self, k):
        return self.redis_conn.exists(k)

    def todict(self):
        #TODO(tvoran): use paginate
        #TODO(tvoran): do something besides multiple gets
        data = {}
        for key in self.redis_conn.keys():
            data[key] = self.get(key)
        return data

    def clear_all(self):
        self.redis_conn.flushdb()
Example #2
def test(arg):
    t, duration, type_ = arg
    conn = StrictRedis()
    conn.flushdb()
    ret = []

    def run():
        iterations = 0
        signal.setitimer(signal.ITIMER_REAL, int(sys.argv[1]))
        try:
            if type_ == 'redis_lock':
                while True:
                    with Lock(conn, "test-lock", expire=5):
                        iterations += 1
                        time.sleep(duration)
            elif type_ == 'native':
                while True:
                    with conn.lock("test-lock", timeout=5):
                        iterations += 1
                        time.sleep(duration)
        except Exception as exc:
            logging.info("Got %r. Returning ...", exc)
        ret.append(iterations)

    sched = scheduler(time.time, time.sleep)
    logging.info("Running in %s seconds ...", t - time.time())
    sched.enterabs(t, 0, run, ())
    sched.run()
    return ret[0]
Example #3
File: utils.py Project: yangryan0/cirrus-1
def wipe_redis():
    """ Wipe all keys from Redis """
    creds = get_redis_creds()
    redis_client = StrictRedis(
        host=creds["host"], port=creds["port"], password=creds["password"],
        db=creds["db"])
    redis_client.flushdb()
Example #4
class SutroTestCase(TestCase):
  def setUp(self):
    redis_connection_pool = ConnectionPool(**settings.WS4REDIS_CONNECTION)
    self.redis = StrictRedis(connection_pool=redis_connection_pool)
    self.client = Client()

  def tearDown(self):
    self.redis.flushdb()

  def random_string(self, length=None, str_type=None):
    DEFAULT_LENGTH = 10
    length = length if length else DEFAULT_LENGTH
    if str_type == 'number':
      string_type = string.digits
    else:
      string_type = string.ascii_lowercase
    return ''.join(random.choice(string_type) for x in range(length))

  def create_a_user(self, display_name, user_id=None, icon_url=None, user_url=None, rdio_key=None):
    user = User()
    user.display_name = display_name
    user.id = user_id if user_id else uuid.uuid4().hex
    user.icon_url = icon_url if icon_url else 'http://' + self.random_string() + '.jpg'
    user.user_url = user_url if user_url else 'http://' + self.random_string() + '.com/' + self.random_string()
    user.rdio_key = rdio_key if rdio_key else 's' + self.random_string(length=5, str_type='number')
    user.save(self.redis)
    return user

  def create_a_party(self, party_id, name):
    party = Party()
    party.id = party_id
    party.name = name
    party.save(self.redis)
    return party
Example #5
File: cache.py Project: aehuynh/scavenger
class IndexCache(object):

    def __init__(self, host, port):
        self.client = StrictRedis(host=host, port=port, db=0)
    def build(self, doc_word_scores):
        """Clears the entire store and adds all the doc_word_scores into a
        hash table in the store.

        :param doc_word_scores: dictionary of dictionaries that looks like:
                    doc_word_scores[word][doc_id] = score of word in that
                                                    document
        """
        self.reset()

        for word, doc_id_score in doc_word_scores.items():
            # Add table name to word
            word_key = DOCUMENT_WORD_SCORE_NAME + word
            self.client.hmset(word_key, doc_id_score)

        self.save_to_disk()

    def reset(self):
        """Deletes all keys in this DB.
        """
        self.client.flushdb()

    def save_to_disk(self):
        """Asyncronous write to disk for persistent storage.
        """
        self.client.bgsave()

    def to_dict(self):
        """Returns the "doc_word_scores" table in Redis in dictionary form.
        """
        doc_word_scores = {}

        for word_key in self.doc_word_scores_iter():
            # Remove the table name from the key
            word = word_key.replace(DOCUMENT_WORD_SCORE_NAME, "")

            # Grab the {doc_ids : scores} dictionary for word
            doc_word_scores[word] = self.client.hgetall(word_key)

        return doc_word_scores

    def doc_word_scores_iter(self):
        """Returns an iterator for the keys of all the words stored in Redis
        """
        return self.client.scan_iter(match=DOCUMENT_WORD_SCORE_NAME + "*")

    def is_empty(self):
        return self.client.dbsize() <= 0

    def doc_scores(self, word):
        """Returns a hash table of document_ids mapping to scores
        """
        word_key = DOCUMENT_WORD_SCORE_NAME + word
        return self.client.hgetall(word_key)
Example #6
def test_lshash_redis_extra_val():
    """
    Test external lshash module
    """
    config = {"redis": {"host": 'localhost', "port": 6379, "db": 15}}
    sr = StrictRedis(**config['redis'])
    sr.flushdb()

    lsh = LSHash(6, 8, 1, config)
    for i in range(num_elements):
        lsh.index(list(els[i]), el_names[i])
        lsh.index(list(els[i]), el_names[i])  # multiple insertions
    hasht = lsh.hash_tables[0]
    itms = [hasht.get_list(k) for k in hasht.keys()]
    for itm in itms:
        assert itms.count(itm) == 1
        for el in itm:
            assert el[0] in els
            assert el[1] in el_names
    for el in els:
        res = lsh.query(list(el), num_results=1, distance_func='euclidean')[0]
        # vector and name are in the first element of the tuple res[0]
        el_v, el_name = res[0]
        # the distance is in the second element of the tuple
        el_dist = res[1]
        assert el_v in els
        assert el_name in el_names
        assert el_dist == 0
    del lsh
    sr.flushdb()
Example #7
def read_equip_name():
    try:
        redis_0 = StrictRedis(host='localhost',
                              port=6379,
                              db=0,
                              decode_responses=True,
                              password='******')
        redis_0.flushdb()
        data = db.equip_name_redis()
        data_box = {}

        for x in data:
            if x[0] not in data_box:
                data_box[x[0]] = {'0101': '', '0102': ''}
            data_box[x[0]][x[1]] += x[2] + ':' + x[3]
            data_box[x[0]][x[1]] += ','

        for x in data_box:
            for y in data_box[x]:
                data_box[x][y] = data_box[x][y][:-1]

        for x in data_box:
            redis_0.hmset(x, data_box[x])
        return True
    except Exception as e:
        print(e)
        return False
Example #8
def drop_database(db_name, redis_database=0):
    mongo_client = MongoClient()
    db = getattr(mongo_client, db_name)
    db.client.drop_database(db_name)

    redis_client = StrictRedis(db=redis_database)
    redis_client.flushdb()
Example #9
def test(arg):
    t, duration, type_ = arg
    conn = StrictRedis()
    conn.flushdb()
    ret = []

    def run():
        iterations = 0
        signal.setitimer(signal.ITIMER_REAL, int(sys.argv[1]))
        try:
            if type_ == 'redis_lock':
                lock = Lock(conn, "test-lock")
            elif type_ == 'native':
                lock = conn.lock("test-lock")
            else:
                raise RuntimeError
            while True:
                with lock:
                    iterations += 1
                    if duration:
                        time.sleep(duration)
        except Exit as exc:
            logging.info("Got %r. Returning ...", exc)
        ret.append(iterations)

    sched = scheduler(time.time, time.sleep)
    logging.info("Running in %s seconds ...", t - time.time())
    sched.enterabs(t, 0, run, ())
    sched.run()
    return ret[0]
Example #10
File: utils.py Project: jf-parent/webbase
def drop_database(db_name, redis_database=0):
    mongo_client = MongoClient()
    db = getattr(mongo_client, db_name)
    db.client.drop_database(db_name)

    redis_client = StrictRedis(db=redis_database)
    redis_client.flushdb()
Example #11
    def test_query_and_worker(self):
        """Test Query and Worker objects.
        """

        # Flush test database (manual access)
        r = StrictRedis(connection_pool=self.conn_pool)
        r.flushdb()

        # Testing Query module
        PaperRank.update.Query(conn_pool=self.conn_pool,
                               pmids=[21876761, 21876726, 29409535, 29025144],
                               proc_count=self.proc_count,
                               lock=self.lock)
        # Testing behavior with invalid IDs
        PaperRank.update.Query(conn_pool=self.conn_pool,
                               pmids=['sdfgsdg'],
                               proc_count=self.proc_count,
                               lock=self.lock)

        dangling_count = r.scard(name='DANGLING')
        seen_count = r.scard('SEEN')

        # Note: Since this is a clean database, the number of tuples added
        # to `GRAPH` should equal the number of IDs added to `EXPLORE`,
        # because none of them are in `SEEN` yet. Their difference is
        # therefore 0 and does not affect the sum of the ID counts in
        # `DANGLING` and `SEEN`
        explore_graph_diff = r.scard(name='GRAPH') - r.scard(name='EXPLORE')

        self.assertEqual(dangling_count + seen_count + explore_graph_diff, 5)

        # Flush test database again (manual access)
        r.flushdb()
Example #12
def test_lshash_redis():
    """
    Test external lshash module
    """
    config = {"redis": {"host": 'localhost', "port": 6379, "db": 15}}
    sr = StrictRedis(**config['redis'])
    sr.flushdb()

    lsh = LSHash(6, 8, 1, config)
    for i in range(num_elements):
        lsh.index(list(els[i]))
        lsh.index(list(els[i]))  # multiple insertions should be prevented by the library
    hasht = lsh.hash_tables[0]
    itms = [hasht.get_list(k) for k in hasht.keys()]
    for itm in itms:
        for el in itm:
            assert itms.count(itm) == 1  # have multiple insertions been prevented?
            assert el in els
    for el in els:
        res = lsh.query(list(el), num_results=1, distance_func='euclidean')[0]
        el_v, el_dist = res
        assert el_v in els
        assert el_dist == 0
    del lsh
    sr.flushdb()
Example #13
def test(arg):
    t, duration, type_ = arg
    conn = StrictRedis()
    conn.flushdb()
    ret = []

    def run():
        iterations = 0
        signal.setitimer(signal.ITIMER_REAL, int(sys.argv[1]))
        try:
            if type_ == 'redis_lock':
                while True:
                    with Lock(conn, "test-lock", expire=5):
                        iterations += 1
                        time.sleep(duration)
            elif type_ == 'native':
                while True:
                    with conn.lock("test-lock", timeout=5):
                        iterations += 1
                        time.sleep(duration)
        except Exception as exc:
            logging.info("Got %r. Returning ...", exc)
        ret.append(iterations)

    sched = scheduler(time.time, time.sleep)
    logging.info("Running in %s seconds ...", t - time.time())
    sched.enterabs(t, 0, run, ())
    sched.run()
    return ret[0]
Example #14
class TestLimitByTimeRuleIntegrated():
    def setup_method(self):
        self.unavailable_redis_client = StrictRedis(host='localhost', port=666, db=0, decode_responses=True)
        self.redis_client = StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
        self.redis_client.flushdb()
    
    def test_should_raise_HttpLimitError_when_execution_count_exceeded(self):
        time_limit = 10
        request_limit = 3
        limit_by_time = LimitByTimeRule(self.redis_client, time_limit, request_limit)
        uid = "test1"

        for i in range(0, request_limit):
            limit_by_time.apply(uid)
        
        #sleep for 1 second
        time.sleep(1) 
        
        #limit reached
        with pytest.raises(HttpLimitError) as exinfo:
            limit_by_time.apply(uid)
        
        exception = exinfo.value
        exception.status_code | should.be.equal.to(HTTPStatus.TOO_MANY_REQUESTS)
        
        match = re.findall(r"\d+", exception.message)
        match | should.have.length.of(1)
        ttl_integer = int(match[0])
        ttl_integer | should.be.equal.to(time_limit - 1)
        
    def test_should_run_when_execution_count_not_exceeded(self):
        time_limit = 10
        request_limit = 4
        limit_by_time = LimitByTimeRule(self.redis_client, time_limit, request_limit)
        uid = "test2"

        for i in range(0, request_limit):
            limit_by_time.apply(uid)        

    def test_should_raise_HttpLimitError_when_redis_is_not_available(self):
        time_limit = 10
        request_limit = 4
        limit_by_time = LimitByTimeRule(self.unavailable_redis_client, time_limit, request_limit, fail_on_connection_error=True)
        uid = "test3"

        with pytest.raises(HttpLimitError) as exinfo:
            limit_by_time.apply(uid)

        exception = exinfo.value
        exception.status_code | should.be.equal.to(HTTPStatus.INTERNAL_SERVER_ERROR)
        exception.message | should.match("Error calculating rate limit.")

    def test_should_run_when_configured_to_run_and_redis_is_not_available(self):
        time_limit = 10
        request_limit = 4
        limit_by_time = LimitByTimeRule(self.unavailable_redis_client, time_limit, request_limit, fail_on_connection_error=False)
        uid = "test4"

        limit_by_time.apply(uid)
Example #15
def cleardb():
    redis = StrictRedis(host='redis',
                        port=6379,
                        db=0,
                        password='******',
                        charset="utf-8",
                        decode_responses=True)
    redis.flushdb()
Example #16
def seed_db():
    client = StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
    client.flushdb()
    players = distribute_players()
    for player in players:
        tasks = generate_tasks()
        redis_player = Player(row=player[0], column=player[1], tasks=tasks)
        redis_player.save()
Example #17
def pytest_runtest_teardown(item):
    from sentry.app import tsdb
    tsdb.flush()

    client = StrictRedis(db=9)
    client.flushdb()

    from celery.task.control import discard_all
    discard_all()
Example #18
class RedisController:
    def __init__(self, config):
        self.isGenerateURL = False

        redis_config = config['Redis']
        host = redis_config.get('host', '127.0.0.1')
        db = redis_config.getint('db', 0)
        port = redis_config.getint('port', 6379)
        self.url_db = StrictRedis(host=host, port=port, db=db)

        self.task_url_name = redis_config.get('task_url_name', 'TaskURLs')
        self.seen_url_name = redis_config.get('seen_url_name', 'SeenURLs')

    def storage_urls(self, urls):
        '''store URLs extracted by the spider'''
        if not urls: return

        pipe = self.url_db.pipeline()
        for url in urls:
            pipe.sadd(self.seen_url_name, url)
        flags = pipe.execute()
        if 1 not in flags: return

        # store new URLs in the task list
        new_urls = filter(lambda x: x, (url if flag else False
                                        for flag, url in zip(flags, urls)))
        pipe.rpush(self.task_url_name, *new_urls)
        pipe.execute()

    def get_url(self):
        '''get url from url db'''
        url = self.url_db.lpop(self.task_url_name)
        return url.decode('utf-8') if url else None

    def is_task_empty(self):
        if self.url_db.llen(self.task_url_name) > 0:
            return False
        else:
            return True

    def flushdb(self):
        self.url_db.flushdb()

    def check_connect(self):
        #test redis connection
        self.url_db.ping()

    @property
    def seen_count(self):
        return self.url_db.scard(self.seen_url_name)

    @property
    def task_count(self):
        return self.url_db.llen(self.task_url_name)
Example #19
    def store(self):
        from simplekv.memory.redisstore import RedisStore
        r = StrictRedis()

        try:
            r.get('anything')
        except ConnectionError:
            pytest.skip('Could not connect to redis server')

        r.flushdb()
        yield RedisStore(r)
        r.flushdb()
Example #20
class BaseHttpLimitIntegratedTest(LiveServerTestCase):
    def init_app(self):
        app = Flask(__name__)
        app.config['TESTING'] = True
        app.config['LIVESERVER_PORT'] = 5000
        app.config['LIVESERVER_TIMEOUT'] = 10

        app.add_url_rule("/", "index", view_func=self._view)
        app.add_url_rule("/test", "test", view_func=self._view)

        return app

    def init_http_limit(self, app):
        """
        To be overridden by child classes
        """
        pass

    def create_app(self):

        app = self.init_app()

        self.redis_client = StrictRedis(host='localhost',
                                        port=6379,
                                        db=0,
                                        decode_responses=True)
        self.redis_client.flushdb()

        self.time_limit = 30
        self.request_limit = 3

        self.logger = self._get_logger()

        self.init_http_limit(app)
        return app

    def _view(self):
        return "test", HTTPStatus.OK

    def _get_logger(self):
        logging.basicConfig(level=logging.DEBUG)
        logger = logging.getLogger(__name__)

        handler = logging.StreamHandler(sys.stdout)
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        handler.setFormatter(formatter)

        logger.addHandler(handler)

        return logger
Example #21
class TestComputeUtil(unittest.TestCase):
    """Test the compute engine `util` module.
    """
    def __init__(self, *args, **kwargs):
        # Running superclass initialization
        super(TestComputeUtil, self).__init__(*args, **kwargs)

        # Setting up PaperRank
        PaperRank.util.configSetup()
        self.config = PaperRank.util.config

        # Connecting to redis
        self.redis = StrictRedis(host=self.config.test['redis']['host'],
                                 port=self.config.test['redis']['port'],
                                 db=self.config.test['redis']['db'])

    def test_buildOutDegreeMap(self):
        """Test the `buildOutDegreeMap` function from the `util` submodule.
        This function also tests the behavior of only computing out degrees
        for IDs that do not yet have them computed.
        """

        # Flush db, set up example data
        self.redis.flushdb()

        # Setting up sample data
        outbound_map = {1: [2, 3], 2: [3, 4], 3: [4], 4: [], 5: [1, 2, 3, 4]}

        # Expected output
        expected_out_degree = {
            b'1': b'2',
            b'2': b'2',
            b'3': b'1',
            b'4': b'0',
            b'5': b'42'
        }

        # Adding outbound citation map, and dummy #5 citation
        self.redis.hmset('OUT', outbound_map)
        self.redis.hmset('OUT_DEGREE', {5: 42})

        # Running util function
        PaperRank.compute.util.buildOutDegreeMap(r=self.redis)

        # Checking output
        out_degree = self.redis.hgetall('OUT_DEGREE')

        self.assertDictEqual(out_degree, expected_out_degree)
Example #22
class RedisStorage(Storage):

    def __init__(self, settings):
        host = settings.get('TRACKER_RADIS_HOST', 'localhost')
        port = settings.getint('TRACKER_RADIS_PORT', 6379)
        db = settings.getint('TRACKER_RADIS_DB', 0)
        password = settings.get('TRACKER_RADIS_PASSWORD', None)

        self._redis = StrictRedis(host, port, db, password=password)

        drop_all_keys = settings.getbool('TRACKER_RADIS_FLUSH_DB', False)
        if drop_all_keys:
            self._redis.flushdb()

    def getset(self, key, checksum):
        return self._redis.getset(key, checksum)
Example #23
File: testing.py Project: atatsu/redcon
class RedconTestBase(unittest.TestCase):

    def setUp(self):
        if hasattr(testsettings, 'REDIS_DB_UNIX_SOCKET_PATH'):
            self.redis = StrictRedis(
                db=testsettings.REDIS_DB,
                unix_socket_path=testsettings.REDIS_DB_UNIX_SOCKET_PATH,
            )
        else:
            self.redis = StrictRedis(
                db=testsettings.REDIS_DB,
                host=testsettings.REDIS_DB_HOST,
                port=testsettings.REDIS_DB_PORT,
            )

        self.redis.flushdb()
        self.redcon = Redcon(self.redis)
Example #24
class RedisManager(NoSqlManager):
    def __init__(self, namespace, url=None, data_dir=None, lock_dir=None, **params):
        self.expiretime = params.pop('expiretime', None)
        NoSqlManager.__init__(self, namespace, url=url, data_dir=data_dir, lock_dir=lock_dir, **params)

    def open_connection(self, host, port, **params):
        self.db_conn = StrictRedis(host=host, port=int(port), **params)

    def __getitem__(self, key):
        return pickle.loads(self.db_conn.hget(self._format_key(key), 'data'))

    def __contains__(self, key):
        return self.db_conn.exists(self._format_key(key))

    def set_value(self, key, value, expiretime=None):
        key = self._format_key(key)

        #
        # beaker.container.Value.set_value calls NamespaceManager.set_value
        # however it (until version 1.6.4) never sets expiretime param.
        #
        # Checking "type(value) is tuple" is a compromise
        # because Manager class can be instantiated outside container.py (See: session.py)
        #
        if (expiretime is None) and (type(value) is tuple):
            expiretime = value[1]

        self.db_conn.hset(key, 'data', pickle.dumps(value))
        self.db_conn.hset(key, 'accessed', datetime.now())
        self.db_conn.hsetnx(key, 'created', datetime.now())

        if expiretime or self.expiretime:
            self.db_conn.expire(key, expiretime or self.expiretime)

    def __delitem__(self, key):
        self.db_conn.delete(self._format_key(key))

    def _format_key(self, key):
        return 'beaker:%s:%s' % (self.namespace, key.replace(' ', '\302\267'))

    def do_remove(self):
        self.db_conn.flushdb()

    def keys(self):
        return self.db_conn.keys('beaker:%s:*' % self.namespace)
Example #25
class testTaggedCache(unittest.TestCase):
    def setUp(self):
        self.redis = StrictRedis(db=DATABASE)
        self.redis.flushdb()
        self.cache = TaggedCache(self.redis)

    def testEverything(self):
        cache = self.cache

        # Set
        cache.set("User:1", "mitch", tags=["User", "PremiumUser"])
        cache.set("User:2", "foo", tags=["User"])
        cache.set("Post:1", "Hello World!", tags=["Post"])
        cache.set("Post:2", "Hello World, again!", tags=["Post"])
        
        self.assertEquals(cache.get("Post:1"), "Hello World!")
        self.assertEquals(cache.get_keys("Post"), set(["Post:1", "Post:2"]))
        self.assertEquals(cache.get_keys("User"), set(["User:1", "User:2"]))
        self.assertEquals(cache.get_tags("User:1"), set(["User", "PremiumUser"]))
        self.assertEquals(cache.get_tags("User:2"), set(["User"]))

        # Delete all post cache entries
        cache.clear_tag("Post")
        self.assertEquals(cache.get("Post:1"), None, "Post:1 still exists")
        self.assertEquals(cache.get("Post:2"), None, "Post:2 still exists")
        
        # Delete User 2 from cache
        cache.clear("User:2")
        self.assertEqual(cache.get_tags("User:2"), set())

        # Clear everything else
        cache.clear_all()
        cache.gc()

        self.assertEquals(self.redis.get("CacheKeys"), None)
        self.assertEquals(self.redis.get("Tags"), None)
        
        self.assertEquals(len(self.redis.keys("*")), 0, "Some keys were not gc'ed")

    def testExpireTtl(self):
        self.cache.set("ExpireMe", "foo", ttl=1)
        time.sleep(2)
        self.assertEqual(self.cache.get("ExpireMe"), None)
Example #26
File: ch7_60.py Project: N4CL/NLP100
def main():
    json_data = []
    with open("artist.json", 'r') as _file:
        for line in _file.readlines():
            # convert each JSON string to a dict
            json_data.append(loads(line))

    path = "./config.ini"
    redis_config = RedisConfig(path)
    host, port, db = redis_config.read_config()

    r = StrictRedis(host=host, port=port, db=db)

    # wipe the connected DB completely
    r.flushdb()

    # register each artist in Redis
    for dic in json_data:
        if "area" in dic:
            r.set(str(dic["id"]) + "_" + dic["name"], dic["area"])
Example #27
class RedisCache:
    def __init__(self, client = None, encoding = 'utf-8', db = 0, compress = True):
        # if a client object is not passed then try
        # connecting to redis at the default localhost port
        self.client = StrictRedis(host = 'localhost', port = 6379, db = db) if client is None else client
        # self.expires = expires
        self.encoding = encoding
        self.compress = compress
        

    def __getitem__(self, url):
        '''
        Load value from Redis for the given URL
        '''
        record = self.client.get(url)
        if record:
            if self.compress:
                record = zlib.decompress(record)
            try:
                rec = record.decode(self.encoding)
            except UnicodeDecodeError:
                rec = bytes(json.dumps({'html' : None, 'code' : 403}), self.encoding)
            return json.loads(rec)
        else:
            raise KeyError(url + ' does not exist.')

    def __setitem__(self, url, result):
        '''
        Save value in Redis for the given URL
        '''
        data = bytes(json.dumps(result), self.encoding, errors = 'ignore')
        if self.compress:
            data = zlib.compress(data)
        self.client.set(url, data)

    def __len__(self):
        return self.client.dbsize()

    def erase(self):
        self.client.flushdb()
Example #28
def resync_redis_with_cache(connection: "Connection",
                            redis: StrictRedis) -> None:
    """
    Reset redis to be in sync with the current contents of the cache.

    Parameters
    ----------
    connection : Connection
    redis : StrictRedis

    Returns
    -------
    None

    Notes
    -----
    You _must_ ensure that no queries are currently running when calling this function.
    Any queries currently running will no longer be tracked by redis, and UNDEFINED BEHAVIOUR
    will occur.
    """
    logger.debug("Redis resync")
    qry = f"SELECT query_id FROM cache.cached"
    queries_in_cache = connection.fetch(qry)
    logger.debug("Redis resync", queries_in_cache=queries_in_cache)
    redis.flushdb()
    logger.debug("Flushing redis.")
    for event in (QueryEvent.QUEUE, QueryEvent.EXECUTE, QueryEvent.FINISH):
        for qid in queries_in_cache:
            new_state, changed = QueryStateMachine(
                redis, qid[0], connection.conn_id).trigger_event(event)
            logger.debug(
                "Redis resync",
                fast_forwarded=qid[0],
                new_state=new_state,
                fast_forward_succeeded=changed,
            )
            if not changed:
                raise RuntimeError(
                    f"Failed to trigger {event} on '{qid[0]}', ensure nobody else is accessing redis!"
                )
Example #29
def read_status():
    try:
        redis_3 = StrictRedis(host='localhost',
                              port=6379,
                              db=3,
                              decode_responses=True,
                              password='******')
        redis_3.flushdb()
        equip = db.read_status()
        data = [dict(row) for row in equip]

        records = {}
        for x in data:
            for y in x:
                if x[y]:
                    x[y] = str(x[y])
                else:
                    x[y] = ''

        for x in data:
            records[x['eid']] = x
            records[x['eid']]['user'] = ''
        for x in records:
            records[x].pop('eid')
        user_dic = {}
        equip_user = db.equip_user()
        for x in equip_user:
            if x[0] not in user_dic:
                user_dic[x[0]] = [x[1]]
            else:
                user_dic[x[0]].append(x[1])
        for x in user_dic:
            records[x]['user'] = '******'.join(user_dic[x])
        for x in records:
            redis_3.hset(name=x, mapping=records[x])
        return True
    except Exception as e:
        print(e)
        return False
Example #30
File: test.py Project: NoyaInRain/wall
class TestCase(AsyncTestCase):
    """
    Subclass API: Base for Wall unit tests. Takes care of setting / cleaning up
    the test environment and provides utilities for testing.

    Attributes:

     * `db`: connection to temporary Redis database (`15`)
     * `app`: Wall application. `TestPost` is available as registered post type.
     * `user`: active user.
    """

    def setUp(self):
        super(TestCase, self).setUp()
        self.db = StrictRedis(db=15)
        self.db.flushdb()
        self.app = WallApp(config={'db': 15})
        self.app.add_post_type(TestPost)
        self.user = self.app.login('Ivanova', 'test')
        self.app.user = self.user

    def get_new_ioloop(self):
        return IOLoop.instance()
Example #31
class TestPredis(object):

    def setUp(self):
        self.redis = StrictRedis(db=9)
        self.predis = Predis(prefix="test", db=9)

    def tearDown(self):
        self.redis.flushdb()

    def test_set(self):
        self.predis.set("foo", "bar")
        assert self.redis.get("test:foo") == "bar"

    def test_get(self):
        self.redis.set("test:foo", "bar")
        assert self.predis.get("foo") == "bar"

    def test_sunionstore(self):
        self.redis.sadd("test:foo", "bar")
        self.redis.sadd("test:boo", "bat")
        self.redis.sadd("test:goo", "bak")
        self.predis.sunionstore("blah", ["foo", "boo"], "goo")
        assert self.redis.smembers("test:blah") == set(["bar", "bat", "bak"])
Example #32
File: ch7_63.py Project: N4CL/NLP100
def build_db():
    json_data = []
    with open("artist.json", 'r') as _file:
        for line in _file.readlines():
            # convert each JSON string to a dict
            json_data.append(loads(line))

    path = "./config.ini"
    redis_config = RedisConfig(path)
    host, port, db = redis_config.read_config()
    r = StrictRedis(host=host, port=port, db=db)

    # wipe the connected DB completely
    r.flushdb()

    # register entries in Redis
    for dic in json_data:
        if "tags" in dic:
            for tag in dic["tags"]:

                # key: unique ID + artist name; members: tag count + "_" + tag value
                r.sadd(str(dic["id"]) + "_" + dic["name"],
                       str(tag["count"]) + "_" + tag["value"])
Example #33
def redis_rules(request):
    """Load up some sample traptor rules into Redis."""
    with open('tests/data/track_rules.json') as f:
        track_rules = [json.loads(line) for line in f]
    with open('tests/data/follow_rules.json') as f:
        follow_rules = [json.loads(line) for line in f]
    with open('tests/data/locations_rules.json') as f:
        locations_rules = [json.loads(line) for line in f]

    conn = StrictRedis(host=HOST_FOR_TESTING, port=6379, db=5)
    conn.flushdb()

    rc = RulesToRedis(conn)
    rc.send_rules(traptor_type='track', rules=track_rules)
    rc.send_rules(traptor_type='follow', rules=follow_rules)
    rc.send_rules(traptor_type='locations', rules=locations_rules)

    def cleanup():
        conn.flushdb()

    request.addfinalizer(cleanup)

    return conn
Example #34
File: test.py Project: krmnn/wall
class TestCase(AsyncTestCase):
    """
    Extension API: Base for Wall unit tests. Takes care of setting / cleaning up
    the test environment and provides utilities for testing.

    Attributes:

     * `db`: connection to temporary Redis database (`15`)
     * `app`: Wall application. `TestPost` is available as registered post type.
    """

    @classmethod
    def setUpClass(cls):
        getLogger('wall').setLevel(CRITICAL)

    def setUp(self):
        super(TestCase, self).setUp()
        self.db = StrictRedis(db=15)
        self.db.flushdb()
        self.app = WallApp(config={'db': 15})
        self.app.add_post_type(TestPost)

    def get_new_ioloop(self):
        return IOLoop.instance()
Example #35
__author__ = 'bencassedy'

from race_scraper import RaceScraper, get_race_data
from celery import group
from celery_app import celery
from datetime import datetime
from multiprocessing import Pool
import time
from redis import StrictRedis

BASE_URL = 'http://www.trailrunnermag.com/index.php/races/race-calendar#'

redis = StrictRedis()
redis.flushdb()
scraper = RaceScraper(base_url=BASE_URL)

# fill out the search form
scraper.browser.visit(scraper.base_url)
scraper.browser.find_by_xpath('//select[@name="state"]//option[@value="CO"]').click()
today = datetime.today()
today = today.strftime("%m/%d/%Y")
scraper.browser.find_by_xpath('//input[@id="startdate"]').fill(today)
scraper.browser.find_by_xpath('//input[@id="find-races"]').click()

time.sleep(2)

races = scraper.browser.find_by_xpath('//div[@id="race-calendar"]//tr')
races.pop(0)


# get the races
Example #36
File: conftest.py Project: 2doi/freight
def pytest_runtest_teardown(item):
    from redis import StrictRedis
    client = StrictRedis(db=9)
    client.flushdb()
Example #37
File: pytest.py Project: delkyd/sentry
def pytest_configure(config):
    # HACK: Only needed for testing!
    os.environ.setdefault("_SENTRY_SKIP_CONFIGURATION", "1")

    os.environ.setdefault("RECAPTCHA_TESTING", "True")
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sentry.conf.server")

    if not settings.configured:
        # only configure the db if its not already done
        test_db = os.environ.get("DB", "sqlite")
        if test_db == "mysql":
            settings.DATABASES["default"].update(
                {"ENGINE": "django.db.backends.mysql", "NAME": "sentry", "USER": "******"}
            )
        elif test_db == "postgres":
            settings.DATABASES["default"].update({"ENGINE": "sentry.db.postgres", "USER": "******", "NAME": "sentry"})
        elif test_db == "sqlite":
            settings.DATABASES["default"].update({"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"})

    settings.TEMPLATE_DEBUG = True

    # Disable static compiling in tests
    settings.STATIC_BUNDLES = {}

    # override a few things with our test specifics
    settings.INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + ("tests",)
    # Need a predictable key for tests that involve checking signatures
    settings.SENTRY_PUBLIC = False

    if not settings.SENTRY_CACHE:
        settings.SENTRY_CACHE = "sentry.cache.django.DjangoCache"
        settings.SENTRY_CACHE_OPTIONS = {}

    # This speeds up the tests considerably, pbkdf2 is by design, slow.
    settings.PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]

    # Replace real sudo middleware with our mock sudo middleware
    # to assert that the user is always in sudo mode
    middleware = list(settings.MIDDLEWARE_CLASSES)
    sudo = middleware.index("sentry.middleware.sudo.SudoMiddleware")
    middleware[sudo] = "sentry.testutils.middleware.SudoMiddleware"
    settings.MIDDLEWARE_CLASSES = tuple(middleware)

    settings.SENTRY_OPTIONS["system.url-prefix"] = "http://testserver"

    # enable draft features
    settings.SENTRY_ENABLE_EMAIL_REPLIES = True

    # disable error reporting by default
    settings.SENTRY_REDIS_OPTIONS = {"hosts": {0: {"db": 9}}}

    settings.SENTRY_ALLOW_ORIGIN = "*"

    settings.SENTRY_TSDB = "sentry.tsdb.inmemory.InMemoryTSDB"
    settings.SENTRY_TSDB_OPTIONS = {}

    settings.RECAPTCHA_PUBLIC_KEY = "a" * 40
    settings.RECAPTCHA_PRIVATE_KEY = "b" * 40

    settings.BROKER_BACKEND = "memory"
    settings.BROKER_URL = None
    settings.CELERY_ALWAYS_EAGER = False
    settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True

    settings.DISABLE_RAVEN = True

    settings.CACHES = {"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}}

    # Disable South in tests as it is sending incorrect create signals
    settings.SOUTH_TESTS_MIGRATE = bool(os.environ.get("USE_SOUTH"))

    # django mail uses socket.getfqdn which doesn't play nice if our
    # networking isn't stable
    patcher = mock.patch("socket.getfqdn", return_value="localhost")
    patcher.start()

    client = StrictRedis(db=9)
    client.flushdb()

    from sentry.runner.initializer import initialize_receivers, fix_south

    fix_south(settings)

    initialize_receivers()

    # force celery registration
    from sentry.celery import app  # NOQA
Example #38
class RedisManager(NamespaceManager):
    connection_pools = {}

    def __init__(self,
                 namespace,
                 url=None,
                 data_dir=None,
                 lock_dir=None,
                 expire=None,
                 **params):
        self.db = params.pop('db', None)
        self.dbpass = params.pop('password', None)

        NamespaceManager.__init__(self, namespace)
        if not url:
            raise MissingCacheParameter("url is required")

        if lock_dir:
            self.lock_dir = lock_dir
        elif data_dir:
            self.lock_dir = data_dir + "/container_tcd_lock"
        if hasattr(self, 'lock_dir'):
            verify_directory(self.lock_dir)

        # Specify the serializer to use (pickle or json?)
        self.serializer = params.pop('serializer', 'pickle')

        self._expiretime = int(expire) if expire else None

        conn_params = {}
        parts = url.split('?', 1)
        url = parts[0]
        if len(parts) > 1:
            conn_params = dict(p.split('=', 1) for p in parts[1].split('&'))

        host, port = url.split(':', 1)

        self.open_connection(host, int(port), **conn_params)

    def open_connection(self, host, port, **params):
        pool_key = self._format_pool_key(host, port)
        if pool_key not in self.connection_pools:
            self.connection_pools[pool_key] = ConnectionPool(host=host,
                                                             port=port,
                                                             db=self.db,
                                                             password=self.dbpass)
        self.db_conn = StrictRedis(connection_pool=self.connection_pools[pool_key],
                                   **params)

    def get_creation_lock(self, key):
        return file_synchronizer(identifier="tccontainer/funclock/%s" % self.namespace,
                                 lock_dir=self.lock_dir)

    def __getitem__(self, key):
        if self.serializer == 'json':
            payload = self.db_conn.get(self._format_key(key))
            if isinstance(payload, bytes):
                return json.loads(payload.decode('utf-8'))
            else:
                return json.loads(payload)
        else:
            return pickle.loads(self.db_conn.get(self._format_key(key)))

    def __contains__(self, key):
        return self.db_conn.exists(self._format_key(key))

    def has_key(self, key):
        return key in self

    def set_value(self, key, value, expiretime=None):
        key = self._format_key(key)

        #
        # beaker.container.Value.set_value calls NamespaceManager.set_value
        # however it (until version 1.6.4) never sets expiretime param.
        #
        # Checking "type(value) is tuple" is a compromise
        # because Manager class can be instantiated outside container.py (See: session.py)
        #
        if (expiretime is None) and (type(value) is tuple):
            expiretime = value[1]

        if self.serializer == 'json':
            serialized_value = json.dumps(value, ensure_ascii=True)
        else:
            serialized_value = pickle.dumps(value, 2)

        if expiretime:
            self.db_conn.setex(key, expiretime, serialized_value)
        else:
            self.db_conn.set(key, serialized_value)

    def __setitem__(self, key, value):
        self.set_value(key, value, self._expiretime)

    def __delitem__(self, key):
        self.db_conn.delete(self._format_key(key))

    def _format_key(self, key):
        return 'beaker:{0}:{1}'.format(self.namespace, key.replace(' ', '\302\267'))

    def _format_pool_key(self, host, port):
        return '{0}:{1}:{2}'.format(host, port, self.db)

    def do_remove(self):
        self.db_conn.flushdb()

    def keys(self):
        return self.db_conn.keys('beaker:{0}:*'.format(self.namespace))
Example #39
class DataBaseHandler(object):
    def __init__(self, truncate=False, db_num=15):
        self.redis = StrictRedis(db=db_num)
        if truncate:
            self.redis.flushdb()

    def get_url(self, url_id):
        return self.redis.hget('url_ids', url_id)

    def get_word(self, word_id):
        return self.redis.hget('word_ids', word_id)

    def add_url(self, url):
        url_id = id(url)
        saved = self.redis.hget('url_ids', url_id)
        if not saved:
            self.redis.hset('url_ids', url_id, url)
        return url_id

    def add_word(self, word):
        word_id = id(word)
        saved = self.redis.hget('word_ids', word_id)
        if not saved:
            self.redis.hset('word_ids', word_id, word)
        return word_id

    def set_word_location(self, url_id, word_id, location):
        locations_id = url_have_word(url_id, word_id)
        self.redis.rpush(locations_id, location)
        # url contains words
        self.redis.hset(word_location(url_id), word_id, locations_id)
        # word in urls
        self.redis.hset(url_location(word_id), url_id, locations_id)

    def get_word_location_in_url(self, location_id):
        return [int(el) for el in self.redis.lrange(location_id, 0, -1)]

    def set_link(self, from_url, to_url, via_word):
        from_id = self.add_url(from_url)
        to_id = self.add_url(to_url)
        word_id = self.add_word(via_word)
        self.redis.set(link(from_id, to_id), word_id)
        self.redis.lpush(from_(from_id), to_id)
        self.redis.lpush(_to(to_id), from_id)

    def is_url_saved(self, url):
        return self.redis.get(id(url))

    def get_urls_locations_of_(self, word):
        """
        :returns word_id, {url_id:[locations],...}
        """
        word_id = id(word)
        return word_id, dict([(int(k), self.get_word_location_in_url(v))
                              for k, v in self.redis.hgetall(url_location(word_id)).items()])

    def get_words_locations_in_(self, url):
        '''
        :param url:
        :return: url_id, {word_id:[locations]}
        '''
        url_id = id(url)
        return url_id, dict([(int(k), self.get_word_location_in_url(v)) for k, v in
                             self.redis.hgetall(word_location(url_id)).items()])
Example #40
def clean_db():
    db = StrictRedis(host=REDIS['host'], port=REDIS['port'], db=SNAPSHOTS_DB)
    db.flushdb()
Example #41
def pytest_configure(config):
    os.environ.setdefault('RECAPTCHA_TESTING', 'True')
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sentry.conf.server')

    if not settings.configured:
        # only configure the db if its not already done
        test_db = os.environ.get('DB', 'sqlite')
        if test_db == 'mysql':
            settings.DATABASES['default'].update({
                'ENGINE': 'django.db.backends.mysql',
                'NAME': 'sentry',
                'USER': '******',
            })
        elif test_db == 'postgres':
            settings.DATABASES['default'].update({
                'ENGINE': 'sentry.db.postgres',
                'USER': '******',
                'NAME': 'sentry',
            })
        elif test_db == 'sqlite':
            settings.DATABASES['default'].update({
                'ENGINE': 'django.db.backends.sqlite3',
                'NAME': ':memory:',
            })

    settings.TEMPLATE_DEBUG = True

    # Disable static compiling in tests
    settings.STATIC_BUNDLES = {}

    # override a few things with our test specifics
    settings.INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
        'tests',
    )
    # Need a predictable key for tests that involve checking signatures
    settings.SENTRY_PUBLIC = False

    if not settings.SENTRY_CACHE:
        settings.SENTRY_CACHE = 'sentry.cache.django.DjangoCache'
        settings.SENTRY_CACHE_OPTIONS = {}

    # This speeds up the tests considerably, pbkdf2 is by design, slow.
    settings.PASSWORD_HASHERS = [
        'django.contrib.auth.hashers.MD5PasswordHasher',
    ]

    # Replace real sudo middleware with our mock sudo middleware
    # to assert that the user is always in sudo mode
    middleware = list(settings.MIDDLEWARE_CLASSES)
    sudo = middleware.index('sentry.middleware.sudo.SudoMiddleware')
    middleware[sudo] = 'sentry.testutils.middleware.SudoMiddleware'
    settings.MIDDLEWARE_CLASSES = tuple(middleware)

    settings.SENTRY_URL_PREFIX = 'http://example.com'

    # enable draft features
    settings.SENTRY_ENABLE_EXPLORE_CODE = True
    settings.SENTRY_ENABLE_EMAIL_REPLIES = True

    # disable error reporting by default
    settings.SENTRY_REDIS_OPTIONS = {'hosts': {0: {'db': 9}}}

    settings.SENTRY_ALLOW_ORIGIN = '*'

    settings.SENTRY_TSDB = 'sentry.tsdb.inmemory.InMemoryTSDB'
    settings.SENTRY_TSDB_OPTIONS = {}

    settings.RECAPTCHA_PUBLIC_KEY = 'a' * 40
    settings.RECAPTCHA_PRIVATE_KEY = 'b' * 40

    settings.BROKER_BACKEND = 'memory'
    settings.BROKER_URL = None
    settings.CELERY_ALWAYS_EAGER = False
    settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True

    settings.DISABLE_RAVEN = True

    settings.CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        }
    }

    # Disable South in tests as it is sending incorrect create signals
    settings.SOUTH_TESTS_MIGRATE = False

    # django mail uses socket.getfqdn which doesn't play nice if our
    # networking isn't stable
    patcher = mock.patch('socket.getfqdn', return_value='localhost')
    patcher.start()

    client = StrictRedis(db=9)
    client.flushdb()

    from sentry.utils.runner import initialize_receivers, fix_south
    initialize_receivers()

    fix_south(settings)

    # force celery registration
    from sentry.celery import app  # NOQA