Example #1
def test_token(redis_server):
    conn = StrictRedis(unix_socket_path=UDS_PATH)
    lock = Lock(conn, "foobar-tok")
    tok = lock.id
    assert conn.get(lock._name) is None
    lock.acquire(blocking=False)
    assert conn.get(lock._name) == tok
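
The test above shows that the lock token only appears under the lock's key after acquisition. A small companion sketch under the same assumptions (the python-redis-lock style Lock used above, with acquire/release and the same fixtures), checking that the key is cleared again on release:

def test_token_released(redis_server):
    conn = StrictRedis(unix_socket_path=UDS_PATH)
    lock = Lock(conn, "foobar-rel")
    lock.acquire(blocking=False)
    assert conn.get(lock._name) == lock.id   # token stored under the lock key
    lock.release()
    assert conn.get(lock._name) is None      # key removed on release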
Example #2
def app(environ, start_response):
	ret = {}
	qs = dict(parse_qsl(environ["QUERY_STRING"]))
	red = StrictRedis(host="localhost", port=6379, db=0)
	root_key = red.get("root")
	if root_key is None: #Load trucks into redis if there is no tree already in it
		inp_file = "Mobile_Food_Facility_Permit.csv"
		load_trucks(inp_file, red)
		root_key = red.get("root")
	try:
		lat = float(qs["latitude"])
		lon = float(qs["longitude"])
		rad = float(qs["radius"])
	except KeyError: #Return error if required fields aren't present
		start_response("400 Bad Request", [("Content-type", "text/plain")])
		ret["error"] = "latitude, longitude, and radius query parameters are required"
		return [dumps(ret)]
	ret["latitude"] = lat
	ret["longitude"] = lon
	ret["radius"] = rad
	food = qs.get("food", "").upper()
	if food:
		ret["food"] = food
		ret["trucks"] = [str(t)
				for t in get_trucks(lat, lon, rad, red, root_key) if food in t.food]
	else:
		trucks = []
		foods = set()
		for t in get_trucks(lat, lon, rad, red, root_key):
			trucks.append(str(t))
			foods |= set(t.food)
		ret["trucks"] = trucks
		ret["foods"] = list(foods)
	start_response("200 OK", [("Content-type", "text/plain")])
	return [dumps(ret)]
Example #3
File: library.py Project: Xifax/imus
class Redis:
    """
    To store and query local/remote Redis server.
    """

    def __init__(self, host='localhost',
                       port=6379,
                       db=0,
                       password=None):
        self.r = StrictRedis(host, port, db, password)

    def __del__(self):
        del(self.r)

    def update(self, track):
        self.r.set(track.key, track.stats())

    def lookup(self, keyword):
        # TODO: add option for caseless search
        return self.r.keys('*' + keyword + '*')

    def retrieve(self, key):
        if isinstance(key, list):
            return self.get_all(key)
        return Track.from_redis(key, self.r.get(key))

    def get_all(self, keys):
        tracks = []
        for key in keys:
            tracks.append(Track.from_redis(key, self.r.get(key)))
        return tracks
Example #4
class RedisDataStore(DataStore):
    """Redis-backed datastore object."""

    def __init__(self, number=0):
        redis_host = os.environ.get('REDIS_PORT_6379_TCP_ADDR')
        redis_port = os.environ.get('REDIS_PORT_6379_TCP_PORT')
        self.redis_conn = StrictRedis(host=redis_host, port=redis_port,
                                      db=number)

    def __setitem__(self, k, v):
        self.redis_conn.set(k, v)

    def __getitem__(self, k):
        return self.redis_conn.get(k)

    def __delitem__(self, k):
        self.redis_conn.delete(k)

    def get(self, k):
        return self.redis_conn.get(k)

    def __contains__(self, k):
        return self.redis_conn.exists(k)

    def todict(self):
        #TODO(tvoran): use paginate
        #TODO(tvoran): do something besides multiple gets
        data = {}
        for key in self.redis_conn.keys():
            data[key] = self.get(key)
        return data

    def clear_all(self):
        self.redis_conn.flushdb()
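
RedisDataStore exposes a dict-like interface over a single Redis database. A hedged usage sketch, assuming the Docker-link environment variables REDIS_PORT_6379_TCP_ADDR and REDIS_PORT_6379_TCP_PORT are set:

# Usage sketch; keys and values are illustrative.
store = RedisDataStore(number=1)
store['greeting'] = 'hello'
assert 'greeting' in store
print(store['greeting'])   # redis-py returns bytes, e.g. b'hello'
print(store.todict())      # e.g. {b'greeting': b'hello'}
del store['greeting']
store.clear_all()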
Example #5
class RedisRequestLogger(RequestLoggerBase):
    """A faster alternative to the default logger, but requiring more configuration"""
    
    def __init__(self):
        from redis import StrictRedis
        
        super(RedisRequestLogger, self).__init__()
        self.db = StrictRedis(
            host = getattr(settings, 'API_REDIS_HOST', 'localhost'),
            port = getattr(settings, 'API_REDIS_PORT', 6379),
            db = getattr(settings, 'API_REDIS_DB', 0),
            password = getattr(settings, 'API_REDIS_PASSWORD', '')
        )
    
    def get_key(self, app):
        return 'bambu-api-requests-%s' % app.key
    
    def log_request(self, app):
        """Saves the number of requests within the given timeframe to the Redis database"""
        
        timestamp = self.get_timestamp()
        key = self.get_key(app)
        values = self.db.get(key)
        
        if values:
            try:
                values = pickle.loads(values)
            except:
                values = {}
        else:
            values = {}
        
        if timestamp in values:
            values = {
                timestamp: values[timestamp] + 1
            }
        else:
            values = {
                timestamp: 1
            }
        
        self.db.set(key, pickle.dumps(values))
    
    def get_request_count(self, app, timestamp):
        """Returns the number of requests by the specified app, within the specified timeframe"""
        
        values = self.db.get(
            self.get_key(app)
        )
        
        if values:
            try:
                values = pickle.loads(values)
            except:
                values = {}
        else:
            values = {}
        
        return values.get(timestamp) or 0
Example #6
File: oauth2.py Project: CIR2000/adam
class BearerAuth(BasicAuth):
    """ Overrides Eve's built-in basic authorization scheme and uses Redis to
    validate bearer token
    """
    def __init__(self):
        super(BearerAuth, self).__init__()
        self.redis = StrictRedis()

    def check_auth(self, token, allowed_roles, resource, method):
        """ Check if API request is authorized.

        Examines token in header and checks Redis cache to see if token is
        valid. If so, request is allowed.

        :param token: OAuth 2.0 access token submitted.
        :param allowed_roles: Allowed user roles.
        :param resource: Resource being requested.
        :param method: HTTP method being executed (POST, GET, etc.)
        """
        if not token:
            return False

        user_id = self.redis.get(token)
        if not user_id:
            return False

        # now switching to the user-reserved mongo instance.

        mongo_prefix = 'MONGO%s' % user_id

        # TODO remove defaulting to localhost so exception is raised
        # if db host is not available. Right now, unless redis holds a
        # key for the user, all dbs are hosted on localhost.
        host = self.redis.get(user_id) or 'localhost'
        if not host:
            raise ConfigException('Cannot locate host for user database %s' %
                                  user_id)

        uri = 'mongodb://%s/%s' % (host, user_id)

        current_app.config['%s_URI' % mongo_prefix] = uri

        self.set_mongo_prefix(mongo_prefix)

        return True

    def authorized(self, allowed_roles, resource, method):
        """ Validates the the current request is allowed to pass through.

        :param allowed_roles: allowed roles for the current request, can be a
                              string or a list of roles.
        :param resource: resource being requested.
        """
        try:
            token = request.headers.get('Authorization').split(' ')[1]
        except:
            token = None
        return self.check_auth(token, allowed_roles, resource, method)
Example #7
File: redisium.py Project: d5/redisium
    def get(cls, name):
        db = StrictRedis()

        value = None
        while True:
            ver = db.get(name+'::v')
            value = cls._get(db, name)
            if ver == db.get(name+'::v'):
                break

        return value
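
The loop above is an optimistic consistent read: the value is fetched repeatedly until the <name>::v version key reads the same before and after. The matching writer is not shown; a hedged sketch of what it presumably looks like (the _set helper is an assumption, only the name/name::v pairing comes from the snippet):

    def set(cls, name, value):
        db = StrictRedis()
        pipe = db.pipeline()
        cls._set(pipe, name, value)   # assumed counterpart of cls._get
        pipe.incr(name + '::v')       # bump the version so concurrent readers retry
        pipe.execute()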
Example #8
class RedisRequestLogger(RequestLoggerBase):
	def __init__(self):
		from redis import StrictRedis
		
		super(RedisRequestLogger, self).__init__()
		self.db = StrictRedis(
			host = getattr(settings, 'API_REDIS_HOST', 'localhost'),
			port = getattr(settings, 'API_REDIS_PORT', 6379),
			db = getattr(settings, 'API_REDIS_DB', 0),
			password = getattr(settings, 'API_REDIS_PASSWORD', '')
		)
	
	def get_key(self, app):
		return 'bambu-api-requests-%s' % app.key
	
	def log_request(self, app):
		timestamp = self.get_timestamp()
		key = self.get_key(app)
		values = self.db.get(key)
		
		if values:
			try:
				values = pickle.loads(values)
			except:
				values = {}
		else:
			values = {}
		
		if timestamp in values:
			values = {
				timestamp: values[timestamp] + 1
			}
		else:
			values = {
				timestamp: 1
			}
		
		self.db.set(key, pickle.dumps(values))
	
	def get_request_count(self, app, timestamp):
		values = self.db.get(
			self.get_key(app)
		)
		
		if values:
			try:
				values = pickle.loads(values)
			except:
				values = {}
		else:
			values = {}
		
		return values.get(timestamp) or 0
Example #9
File: health.py Project: fterdalpdx/finti
	def check_health_status(self):
		'''
			Verify that all necessary systems are available and running correctly.
		'''
		
		status = {'result': 'error', 'message': 'failure'}

		self.log.info("check_health_status(): starting health check")

		# Check Redis. Fail right-away if Redis is down - verifies infrastructure
		
		try:
			cache = StrictRedis(db=config.health_cache_redis_db)
			cache.set('test', 'test')
			rv = cache.get('test')
			if rv != 'test':
				return {'result': 'error', 'message': 'redis is not responding correctly'}
		except Exception as ex:
			self.log.critical("check_health_status() redis is not responding: " + str(ex))
			return {'result': 'error', 'message': 'redis is not responding'}
			
		# Check if down for maintenance
		
		cache = StrictRedis(db=config.health_cache_redis_db)
		is_maintenance = cache.get('is_maintenance')
		if is_maintenance == 'true':
			return {'result': 'error', 'message': 'system is down for maintenance'}

		# Do a web request for all buildings - verifies data quality
		
		try:
			cache = StrictRedis(db=config.tokens_cache_redis_db)
			token = config.test_token
			token_hash = auth.calc_hash(token)
			cache.set(token_hash, 'test@test')
			r = requests.get('http://localhost:8888/org/v1/buildings', auth=(token, ''))
			buildings = r.json()
			
			if len(buildings) < 65 or len(buildings) > 90:
				self.log.critical("check_health_status(): building data failure")
				return {'result': 'error', 'message': 'building data failure'}
			
			#cache.delete(token_hash)
		except Exception as ex:
			self.log.critical("check_health_status(): building data failure: " + str(ex))
			return {'result': 'error', 'message': 'building data failure: ' + str(ex)}
		
		# Check db -- if down set flag to not expire data. Do not fail if db is down
		

		status = {'result': 'success', 'message': "success"}
		
		return status
Example #10
    def store(self):
        from simplekv.memory.redisstore import RedisStore
        r = StrictRedis()

        try:
            r.get('anything')
        except ConnectionError:
            pytest.skip('Could not connect to redis server')

        r.flushdb()
        yield RedisStore(r)
        r.flushdb()
Example #11
class TaskCache(ITaskCache):
    """
    `self.user_store`: {`user_id`: <list of task_ids>}
    `self.task_store`: {`task_id`: <AsyncResult>}
    `self.progress_store`: {`task_id`: <progress value (int)>}
    """
    def __init__(self, user_kwargs, task_kwargs, progress_kwargs):
        self.user_store = StrictRedis(**user_kwargs)
        self.task_store = StrictRedis(**task_kwargs)
        self.progress_store = LingvodocRedisClient(**progress_kwargs)

    def get(self, user, remove_finished=False):
        result = dict()
        tasks = self.user_store.get(user.id)
        if tasks is None:
            return {}
        tasks = pickle.loads(tasks)
        remained_tasks = list()
        for t in tasks:
            val = self.task_store.get(t)
            if val is None:
                continue
            async_result = pickle.loads(val)
            progress = self.progress_store.get(t)
            # Redis client returns byte array. We need to decode it
            if progress is not None:
                progress = int(progress.decode())
            result[t] = {'finished': async_result.ready(),
                         'progress': progress}
            if remove_finished:
                if async_result.ready():
                    self.task_store.delete(t)
                else:
                    remained_tasks.append(t)
        if remove_finished:
            self.user_store.set(user.id, pickle.dumps(remained_tasks))
        return result


    # TODO: add try/catch handlers.
    # we should remove the task from caches (and queue?) if exception is raised
    def set(self, user, task_key, async_task):
        self.task_store.set(task_key, pickle.dumps(async_task))
        cached = self.user_store.get(user.id)
        if cached is None:
            tmp_tasks = [task_key]
        else:
            tmp_tasks = pickle.loads(cached)
            tmp_tasks.append(task_key)
        self.user_store.set(user.id, pickle.dumps(tmp_tasks))
Example #12
File: __init__.py Project: ShawnMilo/milo
def polite_get(url, ttl=TTL, db=0, port=6379):

    """
    Don't hammer the remote servers.

    1. They don't update that often anyway.
    2. We don't want to get throttled or banned.
    3. It's polite.

    Accepts kwargs for ttl, db, and port; otherwise
    uses Redis defaults and a one-hour ttl.
    """

    db = StrictRedis(db=db, port=port)

    key = "url_cache::{0}".format(url)
    result = db.get(key)

    if result is None:

        page = requests.get(url)
        result = page.text
        db.setex(key, ttl, result.encode('utf-8'))

    else:
        result = result.decode('utf-8')

    return result
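
Usage is a single call: the first request fetches the page and caches it with setex, and later calls within the TTL are served from Redis. A short sketch (URL and TTL are illustrative):

# First call hits the network and caches the body; the second is served from Redis.
html = polite_get("https://example.com/", ttl=600)
html_again = polite_get("https://example.com/", ttl=600)
assert html == html_again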
Example #13
File: cache.py Project: microamp/nanodata
class CacheHelper(object):
    """A thin wrapper on top of Redis."""
    def __init__(self, host="localhost", port=6379, db=0):
        self.host = host
        self.port = port
        self.db = db

    def __enter__(self):
        self.r = StrictRedis(host=self.host, port=self.port, db=self.db)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        try:
            if any((exc_type, exc_value, traceback,)):
                raise
        finally:
            pass

    def get(self, key):
        return self.r.get(key) if key in self.r.keys() else None

    def set(self, key, json):
        self.r.set(key, json)

    def keys(self):
        return self.r.keys()

    def reset(self):
        for key in self.keys():
            self.r.delete(key)
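
CacheHelper is meant to be used as a context manager; the connection is only opened in __enter__. A minimal usage sketch (key and payload are illustrative):

with CacheHelper(host="localhost", port=6379, db=0) as cache:
    cache.set("user:1", '{"name": "mitch"}')
    print(cache.get("user:1"))   # the stored JSON string, or None if missing
    print(cache.keys())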
Example #14
 def sync_get(self, identity, *args, **kwargs):
     """
     For getting data from cache
     :param identity: Unique Integer for the data
     :param args: Args for the sync function. (Default: None)
     """
     redis = StrictRedis(connection_pool=self.redis_pool)
     key = key_generator(self.key, identity)
     try:
         if redis.exists(key):
             data = self.get_func(redis.get(key))
         else:
             data = self.sync_func(identity, *args, **kwargs)
             if self.expire:
                 self._setex(redis, key, self.set_func(data))
             else:
                 redis.set(key, self.set_func(data))
         if data is not None and data != "":
             return data
         return None
     except RedisError as re:
         self.log.error("[REDIS] %s", str(re))
         data = self.sync_func(identity, args)
         return data
     finally:
         del redis
Example #15
File: redis.py Project: pombredanne/lineup
class JSONRedisBackend(BaseBackend):

    def __init__(self):
        self.redis = StrictRedis()

    def serialize(self, value):
        return json.dumps(value)

    def deserialize(self, value):
        return value and json.loads(value) or None

    def get(self, key):
        value = self.redis.get(key)
        result = self.deserialize(value)
        return result

    def set(self, key, value):
        product = self.serialize(value)
        return self.redis.set(key, product)

    def rpush(self, key, value):
        product = self.serialize(value)
        return self.redis.rpush(key, product)

    def lpush(self, key, value):
        product = self.serialize(value)
        return self.redis.lpush(key, product)

    def lpop(self, key):
        value = self.redis.lpop(key)
        result = self.deserialize(value)
        return result

    def llen(self, key):
        return self.redis.llen(key)

    def rpop(self, key):
        value = self.redis.rpop(key)
        result = self.deserialize(value)
        return result

    def report_steps(self, name, consumers, producers):
        pipeline = self.redis.pipeline()
        producers_key = ':'.join([name, 'producers'])
        consumers_key = ':'.join([name, 'consumers'])

        for consumer in consumers:
            pipeline.sadd(consumers_key, consumer)

        for producer in producers:
            pipeline.sadd(producers_key, producer)

        pipeline.smembers(consumers_key)
        pipeline.smembers(producers_key)

        result = pipeline.execute()
        all_consumers = result[-2]
        all_producers = result[-1]

        return all_consumers, all_producers
Example #16
File: user.py Project: timetraq/tt-server
    def __admin_handler(self, endpoint: bytes):
        """
        Handle Admin Request

        :param bytes endpoint: Endpoint (in bytes!)
        :return: jsonified answer data
        """
        json_data = request.get_json()
        if json_data is None:
            raise BadRequest()
        if 'admin_token' not in json_data:
            raise BadRequest()
        admin_token = json_data['admin_token']
        if not isinstance(admin_token, str):
            raise BadRequest()
        if not RULE_TOKEN.match(admin_token):
            raise BadRequest()
        redis = StrictRedis(connection_pool=self.api_pool)
        ep_key = 'ADMIN_TOKEN:{:s}'.format(admin_token)
        should_endpoint = redis.get(ep_key)
        if should_endpoint is None:
            raise BadRequest()
        redis.delete(ep_key)
        if should_endpoint != endpoint:
            raise BadRequest()
        if 'data' not in json_data:
            raise BadRequest()
        data = json_data['data']
        if not isinstance(data, dict):
            raise BadRequest()
        return jsonify(self.queue_dispatcher({
            '_': 'admin:{:s}'.format(endpoint.decode('utf-8')),
            'data': data,
        }))
Example #17
File: redismq.py Project: ezbake/redisMQ
class RedisProducer(object):
    def __init__(self, hostname = 'localhost', port = 6379):
        log.debug("Initializing RedisProducer with hostname of %s and port %s" % (hostname, port))
        self.r = StrictRedis(host = hostname, port = port)

    def send(self, message):
        tries = 0
        next_index_key = get_next_index_for_topic_key(message.topic)
        next_index = 1
        result = None
        log.debug("Sending message on topic %s" % message.topic)

        while result is None and tries < TRIES_LIMIT:
            if self.r.exists(next_index_key):
                next_index = long(self.r.get(next_index_key)) + 1

            message_key = get_message_key(message.topic, next_index)

            try:
                pl = self.r.pipeline()
                pl.watch(next_index_key, message_key)
                pl.multi()
                pl.incr(next_index_key).set(message_key, message.payload)
                result = pl.execute()
            except WatchError:
                # Should probably log something here, but all it means is we're
                # retrying
                pass

        if result is None:
            log.error("Could not send message, retry amount exceeded")
            raise RuntimeError("Attempted to send message %s times and failed" % TRIES_LIMIT)
Example #18
class MispRedisConnector(object):

    def __init__(self):
        self.r = StrictRedis(unix_socket_path=redis_socket)

    def search(self, authkey, values=None, hash_values=None, return_eid=False, quiet=False):
        if isinstance(values, list):
            hash_values = [SHA256.new(v.lower()).hexdigest() for v in values]
        elif values:
            hash_values = [SHA256.new(values.lower()).hexdigest()]
        elif not isinstance(hash_values, list):
            hash_values = [hash_values]

        if not hash_values:
            raise Exception('No value to search.')

        org = self.__get_org_by_auth(authkey)
        if not org:
            raise Exception('Invalid authkey')

        if quiet:
            return [(self.r.exists(h) or self.r.exists(org + ':' + h)) for h in hash_values]
        uuid_by_hashes = [self.r.smembers(h).union(self.r.smembers(org + ':' + h)) for h in hash_values]
        if not return_eid:
            to_return = uuid_by_hashes
        else:
            to_return = []
            for h in uuid_by_hashes:
                to_return.append([self.r.hget('uuid_id', uuid) for uuid in h])
        return to_return

    def __get_org_by_auth(self, authkey):
        return self.r.get(authkey)
Example #19
class BearerAuth(BasicAuth):
    """ Overrides Eve's built-in basic authorization scheme and uses Redis to
    validate bearer token
    """

    def __init__(self):
        super(BearerAuth, self).__init__()
        self.redis = StrictRedis()

    def check_auth(self, token, allowed_roles, resource, method):
        """ Check if API request is authorized.

        Examines token in header and checks Redis cache to see if token is
        valid. If so, request is allowed.

        :param token: OAuth 2.0 access token submitted.
        :param allowed_roles: Allowed user roles.
        :param resource: Resource being requested.
        :param method: HTTP method being executed (POST, GET, etc.)
        """
        return token and self.redis.get(token)

    def authorized(self, allowed_roles, resource, method):
        """ Validates the the current request is allowed to pass through.

        :param allowed_roles: allowed roles for the current request, can be a
                              string or a list of roles.
        :param resource: resource being requested.
        """
        try:
            token = request.headers.get("Authorization").split(" ")[1]
        except:
            token = None
        return self.check_auth(token, allowed_roles, resource, method)
Example #20
File: db.py Project: zeglor/tictactoe_py
class DbRedis(Db):
    def __init__(self):
        super().__init__()
        self.redis = StrictRedis(**dbSettings)

    def generateKey(self):
        return self.redis.incr("id")

    def store(self, key, objSerial):
        self.redis.setex(key, TTL, objSerial)

    def retrieve(self, key):
        return self.redis.get(key)

    def lenList(self, name):
        return self.redis.llen(name)

    def listAppend(self, name, val):
        self.redis.lpush(name, val)

    def listPopLeft(self, name):
        return self.redis.lpop(name)

    def retrieveList(self, name):
        return self.redis.lrange(name, 0, -1)

    def removeFromList(self, name, item):
        self.redis.lrem(name, item, 0)

    def keyExists(self, key):
        return self.redis.exists(key)
Example #21
def archive_events():
    print("Starting to archive events.")
    redis = StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=0)
    s3 = tinys3.Connection(AWS_ACCESS_KEY_ID,
                           AWS_SECRET_ACCESS_KEY,
                           default_bucket=S3_BUCKET,
                           endpoint=S3_ENDPOINT,
                           tls=True)

    now = datetime.utcnow()
    nowstring = now.strftime("%Y-%m-%d")

    # 1. We collect all events objects from the query time span
    # and group them by day.

    # keep everything in RAM, keyed by day
    events = {}
    count = 0
    for event in fetch_events():
        event_datetime = datetime.fromtimestamp(event["created"])
        daystring = event_datetime.strftime("%Y-%m-%d")
        if daystring == nowstring:
            continue
        if daystring not in events:
            events[daystring] = []
        count += 1
        events[daystring].append(event)
    print("Fetched %d events." % count)

    # 2. We go through day by day and see where new entries have been added
    # since the last run. For these we create a backup file.

    redis_retention = REDIS_RETENTION_DAYS * 24 * 60 * 60

    for daystring in sorted(events.keys()):
        print("Processing day %s with %d events" % (daystring, len(events[daystring])))

        # possibly skip days already archived
        redis_value = redis.get(daystring)
        if redis_value is not None:
            num_entries_before = int(redis_value)
            if len(events[daystring]) <= num_entries_before:
                print("Skipping day %s, already archived" % daystring)
                continue

        try:
            target_path = upload_dump(daystring, events[daystring], s3)
            print("Uploaded %s" % target_path)
            # write number of entries per day to redis
            redis.setex(daystring, redis_retention, str(len(events[daystring])))
        except Exception as ex:
            sys.stderr.write("ERROR: No backup created for %s\n" % daystring)
            sys.stderr.write(traceback.format_exc() + "\n")

    del redis
    del s3
    del events

    print("Done for today.")
Example #22
def search():
    query = request.args["q"]
    ands = request.args["and"] if "and" in request.args else ""
    ors = request.args["or"] if "or" in request.args else ""
    nots = request.args["not"] if "not" in request.args else ""

    try:
        page = int(request.args["page"])
    except:
        page = 1

    # cache server
    redis = StrictRedis(host="localhost")

    # start measuring elapsed time
    start = time.time()

    # try to load result from cache
    key = json.dumps({
        "q": query,
        "a": ands,
        "o": ors,
        "n": nots
    })
    cache_result = redis.get(key)

    items = cPickle.loads(cache_result) if cache_result else None
    if not items:
        items = SearchEngine(get_db(), query, ands, nots, ors) \
            .search()

        # store results in cache
        redis.set(key, cPickle.dumps(items))

    elapsed_time = (time.time() - start) * 1000

    items_count = len(items)
    pages_count = items_count // 10
    if pages_count < items_count / 10.0:
        pages_count += 1

    if page > pages_count:
        page = pages_count

    # convert to index based
    page -= 1

    # calculate last item within this page
    last_item_index = (page * 10) + 10

    json_data = json.dumps({
        "items": items[page * 10: last_item_index if last_item_index < items_count else items_count],
        "total_count": items_count,
        "elapsed_time": int(elapsed_time)
    }, cls=SuperListJsonEncoder)

    return Response(response=json_data,
                    status=200,
                    mimetype="application/json")
Example #23
def view_user_json(username):
    """The twitter user JSON view"""
    username = username.lower()
    redis_key = "%s.user.%s" % (REDIS_PREFIX, username)
    redis = StrictRedis()
    cache = redis.get(redis_key)
    if not cache:
        cache = dict(status='queued', header='Queued',
                     message="Your request will be processed shortly",
                     code=200)
        redis.set(redis_key, dumps(cache))
        redis.expire(redis_key, CACHE_HOURS*60*60)
        load_tweets.delay(username)
        sleep(.5)
    cache = loads(redis.get(redis_key))

    return jsonify(cache)
Example #24
class RedisBackend(Backend):

    def __init__(self, config):
        super(RedisBackend, self).__init__(config)
        self.redis = StrictRedis(host=config.get("host", "localhost"),
                                 port=config.get("port", 6379),
                                 db=config.get("db", 0))
        self.namespace = config.get("namespace", "short:")

    def furl(self, name):
        return self.namespace + "url:" + name

    def fvisits(self, name):
        return self.namespace + "visits:" + name

    def next_name(self):
        name = None
        while 1:
            name = hashids.encrypt(
                self.redis.incr(self.namespace + "meta:num"))
            if not self.exists(name):
                break
        return name

    def exists(self, name):
        return self.redis.exists(self.furl(name))

    def set(self, link):
        if self.redis.exists(self.furl(link.name)):
            raise NameUnavailableError(link.name)
        self.redis.set(self.furl(link.name), link.url)
        self.redis.set(self.fvisits(link.name), 0)

    def get(self, name):
        rawlink = self.redis.get(self.furl(name))
        if not rawlink:
            raise NotFoundError(name)
        link = Link(name=name,
                    url=rawlink.decode("utf-8"),
                    visits=int(
                        self.redis.get(self.fvisits(name)) or 0
                    ))
        return link

    def visit(self, name):
        self.redis.incr(self.fvisits(name))
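
A hedged usage sketch; the Link constructor (with name, url and visits fields) and the module-level hashids object are assumed from the surrounding project, only the backend methods come from the code above:

# Hypothetical usage; Link(name=..., url=..., visits=...) is assumed.
backend = RedisBackend({"host": "localhost", "port": 6379, "db": 0})
name = backend.next_name()                      # unique short name via hashids
backend.set(Link(name=name, url="https://example.com", visits=0))
backend.visit(name)                             # bump the visit counter
link = backend.get(name)
print(link.url, link.visits)                    # https://example.com 1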
Example #25
File: cache.py Project: Esiravegna/domus
class RedisCache(object):

    def __init__(self, params={}):
        self._validate(params)

        if not self.server:
            raise Exception('Redis Server Not Defined')

        try:
            log.debug('Connecting to redis at [%s]?[%s]' % (self.server, self.database))
            self.cache = StrictRedis(self.server, port=self.port, db=self.database)
        except ConnectionError as ex:
            raise Exception("Unable to connect to Redis", ex)

    def get(self, key):
        """
        Fetch a given key from the cache. If the key does not exist, return
        default, which itself defaults to None.
        """
        ckey = self._create_key(key)
        log.debug("Getting the cache key [%s]" % ckey)
        return self.cache.get(ckey)

    def ping(self):
        """
        This command is often used to test if the cache is still alive, or to measure latency.
        """
        log.debug("Ping to the cache")
        return self.cache.ping()

    def store(self, key, value, expires=None):
        """
        Set a value in the cache. If timeout is given, that timeout will be
        used for the key; otherwise the default cache timeout will be used.
        """
        ckey = self._create_key(key)
        log.debug("Storing the cache key [%s]" % ckey)
        return self.cache.set(ckey, value, ex=expires)

    def delete(self, key):
        """
        Delete a key from the cache, failing silently.
        """
        ckey = self._create_key(key)
        log.debug("Deleting the cache key [%s]" % ckey)
        return self.cache.delete(ckey)

    def _validate(self, params):
        """
        Initialize all the needed parameters
        """
        self.server = params.get('server', 'localhost')
        self.port = params.get('port', 6379)
        self.database = params.get('database', 2)
        self.key_prefix = params.get('key_prefix', 'domus')

    def _create_key(self, key):
        return "%s.%s" % (self.key_prefix, key)
Example #26
File: redismq.py Project: ezbake/redisMQ
class RedisConsumer(object):
    def __init__(self, timeout, group_id, hostname = 'localhost', port = 6379):
        self.group_id = group_id
        self.timeout = timeout
        log.debug("Initializing RedisConsumer with hostname of %s and port %s" % (hostname, port))
        self.r = StrictRedis(host = hostname, port = port)

    def poll(self, topic):
        result = None
        current_index_key = get_next_index_for_topic_key(topic)
        end_millis = time.time() * 1000 + self.timeout
        log.debug("Polling topic %s" % topic)

        while time.time() * 1000 < end_millis:
            if self.r.exists(current_index_key):
                current_index = long(self.r.get(current_index_key))
                consumer_index_key = get_next_index_for_group_id_key(topic, self.group_id)
                pl = self.r.pipeline()

                pl.watch(consumer_index_key)
                consumer_index = 0
                if self.r.exists(consumer_index_key):
                    consumer_index = long(self.r.get(consumer_index_key))

                if current_index > consumer_index:
                    try:
                        pl.multi()
                        pl.incr(consumer_index_key)

                        incr_result = pl.execute()

                        if not incr_result is None and len(incr_result) > 0:
                            consumer_index = long(incr_result[0])
                            key = get_message_key(topic, consumer_index)
                            if self.r.exists(key):
                                result = self.r.get(key)
                                break
                    except WatchError:
                        log.debug("Redis keys changed for topic %s and group %s, trying again" % (topic, self.group_id))
                        pass

        return result

    def unsubscribe_from_topic(self, topic):
        self.r.delete(get_next_index_for_group_id_key(topic, self.group_id))
Example #27
class TestTransactionOperations(unittest.TestCase):

    def setUp(self):
        video_data = {
            'duration' : 22,
            'random' : 0.31461311114729994,
            'vimeoid' : 24879869,
            'description' : 'The story of a cartoon character', 
            'title' : 'Overcast'
        }
        self.database = connection()
        self.video_id = str(self.database.videos.insert(video_data))
        test_user_name = 'test user: {0}'.format(repr(self))
        test_user = dict(name=test_user_name, chances=222)
        uid = self.database.accounts.save(test_user)
        self.test_user_id = str(uid)
        redis_url = os.environ.get('REDISTOGO_URL')
        url = urlparse(redis_url)
        self.redis = StrictRedis(host=url.hostname, port=url.port, 
                                 password=url.password)
        self.prefix = 'watch-transaction:'

    def tearDown(self):
        uid = ObjectId(self.test_user_id)
        self.database.accounts.remove(uid)
        self.database.videos.remove(ObjectId(self.video_id))
        self.database.screening.remove({'user.id': uid})

    def test_begin(self):
        user = users.find(self.test_user_id)
        video = videos.find(self.video_id)
        watchtran.begin(user, video)
        key = self.prefix + self.test_user_id
        val = self.redis.get(key)
        self.assertIsNotNone(val)

    def test_commit_without_begin(self):
        user = users.find(self.test_user_id)
        video = videos.find(self.video_id)
        result = watchtran.commit(user, video)
        self.assertIsNone(result)

    def test_commit_too_early(self):
        user = users.find(self.test_user_id)
        video = videos.find(self.video_id)
        watchtran.begin(user, video)
        result = watchtran.commit(user, video)
        self.assertIsNone(result)

    def test_commit(self):
        user = users.find(self.test_user_id)
        video = videos.find(self.video_id)
        video._duration = 0
        watchtran.begin(user, video)
        result = watchtran.commit(user, video)
        self.assertIsNotNone(result)
Example #28
class testTaggedCache(unittest.TestCase):
    def setUp(self):
        self.redis = StrictRedis(db=DATABASE)
        self.redis.flushdb()
        self.cache = TaggedCache(self.redis)

    def testEverything(self):
        cache = self.cache

        # Set
        cache.set("User:1", "mitch", tags=["User", "PremiumUser"])
        cache.set("User:2", "foo", tags=["User"])
        cache.set("Post:1", "Hello World!", tags=["Post"])
        cache.set("Post:2", "Hello World, again!", tags=["Post"])
        
        self.assertEquals(cache.get("Post:1"), "Hello World!")
        self.assertEquals(cache.get_keys("Post"), set(["Post:1", "Post:2"]))
        self.assertEquals(cache.get_keys("User"), set(["User:1", "User:2"]))
        self.assertEquals(cache.get_tags("User:1"), set(["User", "PremiumUser"]))
        self.assertEquals(cache.get_tags("User:2"), set(["User"]))

        # Delete all post cache entries
        cache.clear_tag("Post")
        self.assertEquals(cache.get("Post:1"), None, "Post:1 still exists")
        self.assertEquals(cache.get("Post:2"), None, "Post:2 still exists")
        
        # Delete User 2 from cache
        cache.clear("User:2")
        self.assertEquals(cache.get_tags("User:2"), set())

        # Clear everything else
        cache.clear_all()
        cache.gc()

        self.assertEquals(self.redis.get("CacheKeys"), None)
        self.assertEquals(self.redis.get("Tags"), None)
        
        self.assertEquals(len(self.redis.keys("*")), 0, "Some keys were not gc'ed")

    def testExpireTtl(self):
        self.cache.set("ExpireMe", "foo", ttl=1)
        time.sleep(2)
        self.assertEquals(self.cache.get("ExpireMe"), None)
Example #29
File: health.py Project: fterdalpdx/finti
	def stat_maint(self):
		self.log.info("stat_maint(): checking maintenance status")
		cache = StrictRedis(db=config.health_cache_redis_db)
		is_maintenance = cache.get('is_maintenance')
		if is_maintenance == 'true':
			self.log.info("stat_maint(): in maintenance mode")
			return True
		else:
			self.log.info("stat_maint(): not in maintenance mode")
			return False
Example #30
File: api.py Project: asimkhaja/monaco
def app_redis_api(app_id):
    '''
    Simple REST api to redis DBs.
    GET, PUT, DELETE are key operations, and POST allows for any command

    method = 'GET': ../redis?key=key
        return r.get('key')
    method = 'PUT': ../redis?key=key&val=val
        return r.set('key', 'val')
    method = 'DELETE': ../redis?key=key
        return r.delete('key')
    method = 'POST': ../redis?cmd=hset&args=key,hashkey,hashval
        Request args
        return getattr(r,cmd)(*args.split(','))
        aka    r.hset(key, hashkey, hashval)
    '''
    r = rediscli()
    monaco = schema.Monaco()
    monaco.refresh(r)
    app_id = str(app_id)
    if not app_id in monaco.app_ids:
        abort(404)
    userapp = schema.App(app_id=app_id)
    userapp.refresh(r)

    master_host = None
    for node_id, role in userapp.nodes.iteritems():
        if role == 'master':
            master_host = monaco.hostnames_by_node_id[node_id]
            break
    assert master_host != None

    r = StrictRedis(master_host, userapp.port)

    if request.method == 'GET':
        if 'key' not in request.args:
            abort(400)
        return r.get(request.args['key'])
    if request.method == 'PUT':
        if 'key' not in request.args or 'val' not in request.args:
            abort(400)
        return r.set(request.args['key'], request.args['val'])
    if request.method == 'DELETE':
        if 'key' not in request.args:
            abort(400)
        return r.delete(request.args['key'])
    if request.method == 'POST':
        if 'cmd' not in request.args or not hasattr(r, request.args['cmd']):
            abort(400)
        if 'args' in request.args:
            args = request.args['args'].split(',')
        else:
            args = []
        return getattr(r, request.args['cmd'])(*args)
    abort(400)
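
From the client side the endpoint can be exercised exactly as the docstring describes; a hedged sketch using requests (the base URL and app id are hypothetical):

# Hypothetical client calls against the API described in the docstring above.
import requests

base = "http://monaco.example.com/app/1234/redis"   # hypothetical URL layout
requests.put(base, params={"key": "greeting", "val": "hello"})
print(requests.get(base, params={"key": "greeting"}).text)           # hello
requests.post(base, params={"cmd": "hset", "args": "h,field,val"})
requests.delete(base, params={"key": "greeting"})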
Example #31
class RedisClient:
    def __init__(self, host='127.0.0.1', port=6379):
        self.r = StrictRedis(host=host, port=port, db=0)

    def get(self, key):
        return self.r.get(key)

    def put(self, key, value):
        return self.r.set(key, value, nx=True)

    def update(self, key, value):
        return self.r.set(key, value, xx=True)

    def remove(self, key):
        return self.r.delete(key)

    def remove_all(self):
        return self.r.flushdb()
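
Because put passes nx=True and update passes xx=True, put only succeeds for keys that do not exist yet and update only for keys that already do. A short sketch:

client = RedisClient()
print(client.put("counter", 1))      # True  (key did not exist)
print(client.put("counter", 2))      # None  (nx refuses to overwrite)
print(client.update("counter", 2))   # True  (key exists, xx allows it)
print(client.update("missing", 1))   # None  (xx refuses to create)
client.remove("counter")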
Example #32
def get_dict_from_redis(
    redis_client: redis.StrictRedis,
    key_format: str,
    key: str,
) -> Optional[Dict[str, Any]]:
    # This function requires inputting the intended key_format to validate
    # that the key fits it, as an additionally security measure. This protects
    # against bugs where a caller requests a key based on user input and doesn't
    # validate it - which could potentially allow users to poke around arbitrary redis keys.
    if len(key) > MAX_KEY_LENGTH:
        error_msg = "Requested key too long in get_dict_from_redis: %s"
        raise ZulipRedisKeyTooLongError(error_msg % (key, ))
    validate_key_fits_format(key, key_format)

    data = redis_client.get(key)
    if data is None:
        return None
    return orjson.loads(data)
Example #33
class Redis():
    def __init__(self, host, port):
        self.redis = StrictRedis(host=host, port=port, db=0, password=None)

    def addKey(self, key, value):
        self.redis.set(key, value)

    def setExpireTime(self, key, expireTime) -> bool:
        return self.redis.expire(key, expireTime)

    def deleteKey(self, key):
        self.redis.delete(key)

    def containsKey(self, key):
        return self.redis.exists(key)

    def getValue(self, key):
        return self.redis.get(key)
Example #34
    def get_q(self, redis: StrictRedis):
        """ Get a queue object with this ID for a given ``redis`` connection. """
        if (self._buffer is None or self._serializer is None):
            res = redis.get(self._key + OPEN_APPEND)
            if res:
                chid = pickle.loads(res)
                self._buffer = chid.buffer
                self._serializer = chid.serializer
                self.opened = chid.opened

        if self._buffer is not None and self._serializer is not None:
            return Ch(redis,
                      self._key,
                      buffer=self._buffer,
                      serializer=self._serializer,
                      opened=self.opened)

        return None
Example #35
class RedisBrain(object):
    def __init__(self, redis_url):
        try:
            self.redis = StrictRedis(host=redis_url, port=7777, db=0)
        except:
            self.redis = None

    def set(self, key, value):
        if self.redis:
            self.redis.set(key, value)
            return True
        else:
            return False

    def get(self, key):
        if self.redis:
            return self.redis.get(key)
        return None
Example #36
class RedisCache(BaseCache):
    def __init__(self, redis_host, redis_port, default_timeout):
        super(RedisCache, self).__init__(default_timeout=default_timeout)
        self.redis_connection = StrictRedis(host=redis_host,
                                            port=redis_port,
                                            db=1)

    def _put(self, context, params, item, timeout):
        key = RedisCache._make_key(context, params)
        self.redis_connection.set(key, item, ex=timeout)
        context_key = RedisCache._make_context_key(context)
        self.redis_connection.sadd(context_key, key)
        self.redis_connection.expire(context_key, self.timeout())

    def _get(self, context, params):
        item = None
        key = RedisCache._make_key(context, params)
        context_key = RedisCache._make_context_key(context)
        if self.redis_connection.sismember(context_key, key):
            item = self.redis_connection.get(key)
            if item is None:
                self.redis_connection.srem(context_key, key)
            else:
                self.redis_connection.expire(key, self.timeout())
                self.redis_connection.expire(context_key, self.timeout())
        return item

    def _clear(self, context):
        context_key = RedisCache._make_context_key(context)
        pipe = self.redis_connection.pipeline()
        item = self.redis_connection.spop(context_key)
        while item is not None:
            pipe.delete(item)
            item = self.redis_connection.spop(context_key)
        pipe.execute(raise_on_error=True)

    @staticmethod
    def _make_key(context, params):
        params = json.dumps(params, ensure_ascii=True, sort_keys=True)
        return '{}:{}'.format(context, params)

    @staticmethod
    def _make_context_key(context):
        return RedisCache._make_key('ctx', context)
Example #37
class redis_session:

    prefix = 'was:session_key:'  # prefix prepended to Redis keys
    server_ip = 'localhost'      # Redis ip
    port = 6379
    timeout = 3600

    def __init__(self):
        self.db = StrictRedis(self.server_ip, self.port)

    # If the session exists, extend its TTL by `timeout` and return the stored
    # user id; otherwise return None.
    def get_session(self, session_key):
        inst = self.db.get(self.prefix + session_key)
        if inst:
            self.db.expire(self.prefix + session_key, self.timeout)
        return inst

    # On a new session request, create a session key, store the user under it
    # and return the key.
    def save_session(self, user_name):
        session_key = str(uuid4())
        # StrictRedis.setex takes (name, time, value)
        self.db.setex(self.prefix + session_key, self.timeout, user_name)
        return session_key
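
Typical flow: create a session on login, then look it up (which also refreshes the TTL) on later requests. A short sketch with an illustrative user name:

sessions = redis_session()
key = sessions.save_session('alice')        # new session key, TTL = 3600s
user = sessions.get_session(key)            # the stored user name, TTL refreshed
missing = sessions.get_session('unknown')   # None once expired or never created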
Example #38
class RateLimiter(cherrypy.Tool):
    def __init__(self, limit=100, window=60):
        cherrypy.Tool.__init__(self, 'before_handler',
                               self.process_request, priority=10)
        cherrypy.log("Creating rate limiter with limit={} and window={}".format(limit, window))
        self.limit = limit
        self.window = window
        self.redis = StrictRedis(host='redis', port=6379)

    def process_request(self):
        print(cherrypy.request)
        print(cherrypy.request.remote)
        requester = cherrypy.request.remote.ip
        print("remote:", requester)

        # un-comment if you want to ignore calls from localhost
        # if requester == '127.0.0.1':
        #     return

        key = "{0}: {1}".format(requester, cherrypy.request.path_info)
        print('Key: {0}'.format(key))

        try:
            remaining = self.limit - int(self.redis.get(key))
        except (ValueError, TypeError):
            remaining = self.limit
            self.redis.set(key, 0)

        expires_in = self.redis.ttl(key)

        if expires_in == -1:
            self.redis.expire(key, self.window)
            expires_in = self.window

        cherrypy.request.headers.update({
            'X-RateLimit-Remaining: ': str(remaining - 1),
            'X-RateLimit-Limit: ': str(self.limit),
            'X-RateLimit-Reset: ': str(time.time() + expires_in)
        })

        if remaining > 0:
            self.redis.incr(key, 1)
        else:
            raise cherrypy.HTTPError(429, 'Blocked: Too many requests!')
Example #39
class Store:
    def __init__(self):
        self.redis = StrictRedis(host="localhost",
                                 db=0,
                                 socket_connect_timeout=2,
                                 socket_timeout=2)

    def get(self, name, id):
        if int(id) == 0:
            return None
        item = self.redis.lindex(name, int(id) - 1)
        if item:
            return json.loads(item)

    def get_all(self, name):
        return [json.loads(item) for item in self.redis.lrange(name, 0, -1)]

    def add(self, value):
        new_id = self.redis.llen(value.path()) + 1
        value.set_id(new_id)
        self.redis.rpush(value.path(), value.serialize())
        return new_id

    def update(self, value):
        items = self.get_all(value.path())

        for idx, item in enumerate(items):
            if value.equal(item):
                value.set_id(idx + 1)
                self.redis.lset(value.path(), idx, value.serialize())
                return idx + 1

        return self.add(value)

    def set_item(self, key, value):
        self.redis.set(key, json.dumps(value))

    def get_item(self, key):
        item = self.redis.get(key)
        if item:
            return json.loads(item)

    def clear(self):
        self.redis.flushdb()
Example #40
def execute_redis_db(key):
    """
    :param key: the key to look up
    :return: the value found
    """
    value = None
    if key is None:
        return value
    try:
        host, port, db, password = get_redis_config()
        redis_pool = StrictRedis(host=host,
                                 port=port,
                                 db=db,
                                 password=password)
        log.error(redis_pool.keys())
        value = redis_pool.get(key)
    except Exception as e:
        log.error(e)
    return value
Example #41
def login():
    username = request.json.get('username') or request.json.get('mail')
    password = request.json.get('password')

    if not username or not password:
        abort(404)

    accounts = app.data.driver.db['accounts']
    user = accounts.find_one({'mail': username})
    #print user
    if not user:
        status = "error"
        message = "user not exist"
        return jsonify(status=status, message=message)

    if not verify_password(password, user['hash_password']):
        status = "error"
        message = "password invalid"
        return jsonify(status=status, message=message)

    status = "success"
    message = "login ok"
    redis = StrictRedis()
    #token = generate_auth_token(mail=username)
    payload = {
        "mail": username,
        "iss": app.config['JWT_ISSUER'],
        'aud': app.config['JWT_AUDIENCES']
    }

    token = jwt.encode(payload, key=app.config['JWT_SECRET'])
    print(token)
    redis = StrictRedis()
    redis.set(username, token)
    token = redis.get(username)

    # detoken = jwt.decode(token, key=app.config['JWT_SECRET'])
    # print "decode token"
    # print detoken
    out = jsonify(status=status, message=message, token=token)
    out.set_cookie('jwttoken', token)
    #return jsonify(status=status,messag=message, token=token)
    return out
Example #42
class RedisCache:
    def __init__(self, expires=timedelta(hours=10), encoding='utf-8'):
        self.client = StrictRedis(host=os.environ.get('REDIS_HOST',
                                                      'localhost'),
                                  port=6379,
                                  db=0)
        self.expires = expires
        self.encoding = encoding

    def __getitem__(self, url):
        record = self.client.get(url)
        if record:
            return json.loads(record.decode(self.encoding))
        else:
            raise KeyError(url + ' does not exist')

    def __setitem__(self, url, result):
        data = bytes(json.dumps(result), self.encoding)
        self.client.setex(url, self.expires, data)
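
The cache behaves like a dict keyed by URL, with values JSON-encoded and expired via setex. A short usage sketch (URL and payload are illustrative):

cache = RedisCache(expires=timedelta(minutes=5))
cache['https://example.com/'] = {'status': 200, 'title': 'Example'}
print(cache['https://example.com/'])   # {'status': 200, 'title': 'Example'}
# Once the TTL passes (or for unknown URLs) __getitem__ raises KeyError.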
Example #43
class RedisBrain(object):
    def __init__(self):
        try:
            self.redis = StrictRedis(host=REDIS_URL)
        except Exception as e:
            logger.error(e)
            self.redis = None

    def set(self, key, value):
        if self.redis:
            self.redis.set(key, value)
            return True
        else:
            return False

    def get(self, key):
        if self.redis:
            return self.redis.get(key)
        return None
Example #44
    def get_q(self, redis: StrictRedis):
        """ Get a queue object with this ID for a given ``redis`` connection. """
        if (self._buffer is None or self._serializer is None):
            res = redis.get(self._key + OPEN_APPEND)
            if res:
                # Q is open, so apply the existing qid props to this obj.
                qid = pickle.loads(res)
                self._buffer = qid.buffer
                self._serializer = qid.serializer
                self.opened = qid.opened

        if self._buffer is not None and self._serializer is not None:
            return Q(redis,
                     self._key,
                     buffer=self._buffer,
                     serializer=self._serializer,
                     opened=self.opened)

        return None
Example #45
class RedisUse(object):
    def __init__(self):
        self.sr = StrictRedis(host='localhost',
                              port=6379,
                              decode_responses=True)

    def insertTokenOpenid(self, token, openid):
        res = self.sr.set(token, openid)
        res_time = self.sr.expire(token, 7200)

        return res

    def getTokenOpenid(self, token):
        res = self.sr.get(token)

        return res

    def insertOpenidData(self, openid, data):
        res = self.sr.hmset(openid, data)
        res_time = self.sr.expire(openid, 604800)

        return res

    def selectOpenidNature(self, openid):
        res = self.sr.hkeys(openid)

        return res

    def getOpenidNature(self, openid, nature):
        res = self.sr.hget(openid, nature)

        return res

    def getOpenidNatureAll(self, openid):
        res = self.sr.hgetall(openid)

        return res

    def deleteOpenidNature(self, openid, keys):
        res = self.sr.hdel(openid, keys)

        return res
Example #46
File: cache.py Project: ericdaat/notflix
class Cache(object):
    def __init__(self):
        self.redis_cache = StrictRedis(host=CACHE_HOST)
        self.ex = CACHE_TIMEOUT

    def get(self, key, start=None, end=None):
        """ Get an object from cache by its key

        Args:
            key (str): cache key
            start (int): when querying a redis list,\
                starting range of the list.
            end (int): when querying a redis list, ending range of the list.

        Returns:
            str: cached object
        """

        if not (start is None or end is None):
            result = self.redis_cache.lrange(key, start=start, end=end)
        else:
            result = self.redis_cache.get(key)

        return result

    def set(self, key, value):
        """ Set an object in cache by its key

        Args:
            key (str): cache key
            value (str): object to store in cache
        """
        return self.redis_cache.set(key, value, ex=self.ex)

    def append(self, key, value):
        """ Append to a redis list

        Args:
            key (str): cache key
            value (str): object to store in cache
        """
        return self.redis_cache.lpush(key, value)
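
The same get method covers both plain keys and Redis lists: with start/end it falls back to lrange, otherwise it issues a plain GET. A short sketch (CACHE_HOST and CACHE_TIMEOUT are module constants assumed from the project):

cache = Cache()
cache.set('user:1', 'mitch')                  # plain key, expires after CACHE_TIMEOUT
print(cache.get('user:1'))                    # b'mitch'
cache.append('recent', 'item-1')              # LPUSH onto a list
cache.append('recent', 'item-2')
print(cache.get('recent', start=0, end=-1))   # [b'item-2', b'item-1']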
Example #47
class SearchEngine(object):
    def __init__(self):
        self.client = StrictRedis()

    def search(self, content):
        # Look up the matching data IDs for the given content
        names = self.client.zrange(name=content, start=0, end=-1, desc=True)
        result = []
        # Fetch the data for each ID
        for name in names:
            value = self.client.get(name=name)
            result.append(value)
        return result

    def add(self, name=None, value=""):
        # Store the raw value
        self.client.set(name=name, value=value)
        # Tokenize and index the value
        self.analyze(name, value)
        return True

    # Tokenize
    def analyze(self, name, value):
        if type(value) == dict:
            for v in value.values():
                analyzed_list = jieba.cut(v, cut_all=False)
                self.generate_analyze_score(name, analyzed_list)
        elif type(value) == str:
            analyzed_list = jieba.cut(value, cut_all=False)
            self.generate_analyze_score(name, analyzed_list)
        elif isinstance(value, Iterable):
            for v in value:
                analyzed_list = jieba.cut(v, cut_all=False)
                self.generate_analyze_score(name, analyzed_list)
        else:
            raise Exception("数据格式错误")

    # Compute a score for each token of the entry
    def generate_analyze_score(self, name, analyzed_list):
        for e in analyzed_list:
            self.client.zincrby(name=e, value=name)
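
add stores each document under its key and scores its tokens in per-token sorted sets; search reads the sorted set for the query term and fetches the matching documents. A short sketch with illustrative documents:

engine = SearchEngine()
engine.add(name="doc:1", value="Redis is an in-memory data store")
engine.add(name="doc:2", value="Redis sorted sets power this search")
print(engine.search("Redis"))   # values of the documents whose tokens include "Redis"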
Example #48
class RedisDB(DBSpec):
    def __init__(self, port=12000, prefix=None, use_compression=True):
        self._server = StrictRedis(port=port)
        self._prefix = "" if prefix is None else prefix

        self.index = 0

        self._use_compression = use_compression and LZ4_ENABLED
        if self._use_compression:
            self._pack = compress
            self._unpack = decompress
        else:
            self._pack = serialize
            self._unpack = deserialize

    @property
    def num_trajectories(self) -> int:
        num_trajectories = self._server.llen("trajectories") - 1
        return num_trajectories

    def push_trajectory(self, trajectory):
        trajectory = self._pack(trajectory)
        self._server.rpush("trajectories", trajectory)

    def get_trajectory(self, index=None):
        index = index if index is not None else self.index
        trajectory = self._server.lindex("trajectories", index)
        if trajectory is not None:
            trajectory = self._unpack(trajectory)
            self.index = index + 1
        return trajectory

    def dump_weights(self, weights, prefix):
        weights = self._pack(weights)
        self._server.set(f"{self._prefix}_{prefix}_weights", weights)

    def load_weights(self, prefix):
        weights = self._server.get(f"{self._prefix}_{prefix}_weights")
        if weights is not None:
            weights = self._unpack(weights)
        return weights
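
RedisDB acts as a small replay-buffer and weights store: trajectories go onto a Redis list, weights into per-prefix keys, both packed with the configured serializer. A hedged usage sketch (the compress/serialize helpers and DBSpec base class are assumed from the module):

db = RedisDB(port=12000, prefix="walker")
db.push_trajectory({"observations": [0, 1, 2], "reward": 1.0})
trajectory = db.get_trajectory()   # unpacked trajectory; advances db.index
db.dump_weights({"layer0": [0.1, 0.2]}, prefix="actor")
weights = db.load_weights(prefix="actor")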
Example #49
    def parse(self, response):
        datas = response.xpath('//div[@class="info clear"]')

        for data in datas:
            data_dict = {
                'selectData': [],
            }

            data_dict['title'] = data.xpath(
                './div[@class="title"]/a/text()').extract_first()
            data_dict['url'] = data.xpath(
                './div[@class="title"]/a/@href').extract_first()
            focus = data.xpath('./div[@class="followInfo"]/text()'
                               ).extract_first().split('/')[0]
            vide = data.xpath('./div[@class="followInfo"]/text()'
                              ).extract_first().split('/')[1]
            # area = data.xpath('.//div[@class="houseInfo"]/text()[2]').extract_first()
            houseYear = data.xpath(
                './/div[@class="positionInfo"]/text()[2]').extract_first()
            data_dict['buil_year'] = houseYear

            data_dict['vide'] = int(re.findall(r'\d+', vide)[0])
            data_dict['focus'] = int(re.findall(r'\d+', focus)[0])

            yield scrapy.Request(url=data_dict['url'],
                                 callback=self.parse_detail,
                                 meta=data_dict)

        con = StrictRedis(host='192.168.177.138', port=6379, db=1)
        page = con.get("page")
        # "page" may be unset on the first run; fall back to page 1 in that case
        page = int(page.decode()) if page is not None else 1

        if page < 100:
            time.sleep(1)
            next_url = "https://zz.lianjia.com/ershoufang/pg{}/".format(page)
            print("++++++++++++++++++++%s+++++++++++++++" % next_url)
            page = page + 1
            con.set("page", page)
            yield scrapy.Request(url=next_url, callback=self.parse)
        else:
            con.set("num", 1)
예제 #50
0
class MispRedisConnector(object):
    def __init__(self):
        self.r = StrictRedis(unix_socket_path=redis_socket)

    def search(self,
               authkey,
               values=None,
               hash_values=None,
               return_eid=False,
               quiet=False):
        if isinstance(values, list):
            hash_values = [SHA256.new(v.lower()).hexdigest() for v in values]
        elif values:
            hash_values = [SHA256.new(values.lower()).hexdigest()]
        elif not isinstance(hash_values, list):
            hash_values = [hash_values]

        if not hash_values:
            raise Exception('No value to search.')

        org = self.__get_org_by_auth(authkey)
        if not org:
            raise Exception('Invalid authkey')

        if quiet:
            return [(self.r.exists(h) or self.r.exists(org + ':' + h))
                    for h in hash_values]
        uuid_by_hashes = [
            self.r.smembers(h).union(self.r.smembers(org + ':' + h))
            for h in hash_values
        ]
        if not return_eid:
            to_return = uuid_by_hashes
        else:
            to_return = []
            for h in uuid_by_hashes:
                to_return.append([self.r.hget('uuid_id', uuid) for uuid in h])
        return to_return

    def __get_org_by_auth(self, authkey):
        return self.r.get(authkey)
예제 #51
0
def get_photo(mid, lvid, pid):
    print("Get Photossss")
    result = None
    with open('tmp.jpg', 'wb+') as image:
        # try to get from redis first
        print("This is from get_photo pid = %s" % pid)
        redis_client = StrictRedis(host='128.2.100.176', port=6379)
        result = redis_client.get(pid)
        if not result:  # cache miss
            print('cache missed for pid ' + pid)
        else:
            print("Cache hit for %s!!!" % pid)
            image.write(base64.decodebytes(result))
            # Response(image, content_type=result.headers['content-type'])
    if result:
        return send_file('tmp.jpg', mimetype="image/jpeg")

    with open('tmp.jpg', 'wb+') as image:
        # try to get from cassandra if not found in redis
        #if mid == '1':
        cluster = Cluster(["128.2.100.174"], port=9337)
        #else:
        #cluster = Cluster([mid], port = 9337)
        db_session = cluster.connect('photostore')
        query = db_session.prepare("SELECT * FROM photo WHERE pid=?")
        try:
            result = db_session.execute(query, [pid])
            if not result:
                abort(404)
            else:
                image.write(base64.b64decode(result[0].payload))
        except IOError:
            log.exception("Query error")
        print("This is from Cassandra!!!\n")
        value = {'payload': result[0].payload, 'mimetype': result[0].mimetype}
    tempImage = open("tmp.jpg", "rb")
    #tempImage = open(result[0].pid + "."+ result[0].mimetype.split('/')[1].replace('e',''), 'rb')
    imagestr = base64.b64encode(tempImage.read())
    redis_client.setex(result[0].pid, 20, imagestr)
    tempImage.close()
    return send_file('tmp.jpg', mimetype=value['mimetype'])
예제 #52
0
class Reader(BaseReader):
    """
    Redis settings Reader
    A simple redis getter
    """
    _default_conf = {'host': 'localhost', 'port': 6379}

    def __init__(self, conf):
        super(Reader, self).__init__(conf)

        self.redis = StrictRedis(host=self.conf['host'],
                                 port=self.conf['port'])

    def _get(self, key):
        result = self.redis.get(key)
        if isinstance(result, six.binary_type):
            result = result.decode('utf-8')
        return result

    def _set(self, key, value):
        self.redis.set(key, value)
예제 #53
0
class redis_session:
    prefix = 'was:session_key:'  # prefix prepended to every Redis key
    server_ip = 'localhost'  # Redis ip
    port = 6379

    def __init__(self):
        self.db = StrictRedis(host=self.server_ip, port=self.port, db=0)

    # Look up the session: return the stored user name if it exists, None otherwise
    def open_session(self, session_key):
        user_name = self.db.get(self.prefix + session_key)
        return user_name

    # Create a new session key for the user and return it
    def save_session(self, user_name):
        session_key = str(uuid4())
        try:
            self.db.set(self.prefix + session_key, user_name)
        except Exception as exc:
            print('failed to store session:', exc)
        return session_key
예제 #54
0
def get_shops_list(etsy_api_key: str,
                   max_stores: int,
                   redis_store: StrictRedis,
                   page: int = 1) -> List[str]:
    """TODO: Implement cache invalidation."""
    cache_key = f'etsy_store_list:page-{page}-num-{max_stores}'
    cache_hit = redis_store.get(cache_key)
    if cache_hit:
        return json.loads(cache_hit)

    api_base_url = 'https://openapi.etsy.com/v2'
    query = {'api_key': etsy_api_key, 'limit': max_stores, 'page': page}
    resp = requests.get(f'{api_base_url}/shops', params=query)

    if resp.status_code != 200:
        return []

    stores = resp.json()['results']
    shop_names = [store['shop_name'] for store in stores]
    redis_store.set(cache_key, json.dumps(shop_names))
    return shop_names
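A minimal usage sketch, assuming a valid Etsy API key (the key string below is illustrative) and a reachable Redis instance; results are cached under a per-page key, so a repeat call is served from Redis without an HTTP request.

redis_store = StrictRedis(host='localhost', port=6379, db=0)
shops = get_shops_list('my-etsy-api-key', max_stores=25, redis_store=redis_store, page=1)
shops_again = get_shops_list('my-etsy-api-key', max_stores=25, redis_store=redis_store, page=1)  # cache hit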
예제 #55
0
class RedisCache:
    def __init__(self, client=None, expires=timedelta(days=30), encoding='utf-8', compress=True):
        self.client = StrictRedis(host='localhost', port=6379, db=0) if client is None else client
        self.expires = expires
        self.encoding = encoding
        self.compress = compress

    def __getitem__(self, url):
        record = self.client.get(url)
        if record:
            if self.compress:
                record = zlib.decompress(record)
            return json.loads(record.decode(self.encoding))
        else:
            raise KeyError(url + ' does not exist')

    def __setitem__(self, url, result):
        data = bytes(json.dumps(result), self.encoding)
        if self.compress:
            data = zlib.compress(data)
        self.client.setex(url, self.expires, data)
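A minimal dictionary-style usage sketch for the cache above, assuming a local Redis server and the module-level imports the class relies on (timedelta, json, zlib); the URL and payload are illustrative. Entries written through __setitem__ expire after self.expires via SETEX.

cache = RedisCache(expires=timedelta(hours=1))
cache['http://example.com'] = {'html': '<html></html>', 'code': 200}
try:
    page = cache['http://example.com']
except KeyError:
    page = None   # raised once the entry has expired or was never stored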
예제 #56
0
class RedisCache:
    def __init__(self, client=None, encoding='utf-8', db=0, compress=True):
        # if a client object is not passed then try
        # connecting to redis at the default localhost port
        self.client = StrictRedis(host='localhost', port=6379, db=db) if client is None else client
        # self.expires = expires
        self.encoding = encoding
        self.compress = compress


    def __getitem__(self, url):
        '''
        Load value from Redis for the given URL
        '''
        record = self.client.get(url)
        if record:
            if self.compress:
                record = zlib.decompress(record)
            try:
                rec = record.decode(self.encoding)
            except UnicodeDecodeError:
                rec = bytes(json.dumps({'html' : None, 'code' : 403}), self.encoding)
            return json.loads(rec)
        else:
            raise KeyError(url + ' does not exist.')

    def __setitem__(self, url, result):
        '''
        Save value in Redis for the given URL
        '''
        data = bytes(json.dumps(result), self.encoding, errors = 'ignore')
        if self.compress:
            data = zlib.compress(data)
        self.client.set(url, data)

    def __len__(self):
        return self.client.dbsize()

    def erase(self):
        self.client.flushdb()
예제 #57
0
class GraphStorage(object):
    def __init__(self, link_file='links.txt', host='localhost', port=6379):
        self.db = StrictRedis(host, port)
        self.link_file = link_file

    def add_nodes(self, graphDict, nameDict):
        pipe = self.db.pipeline()
        with open(self.link_file) as link_file:
            for _ in xrange(1):
                for _ in xrange(570607):
                    node, links = self.create_link(next(link_file))
                    name = linecache.getline('titles.txt', node)
                    pipe.rpush('links-{0}'.format(node), *links)
                    pipe.append('name-{0}'.format(node), name)
                    pipe.append('id-{0}'.format(name), node)
                pipe.execute()

    def create_link(self, link):
        start, targets = link.rstrip('\n').split(': ')
        return int(start), map(int, targets.split())

    def get(self, kind, value):
        key = '{0}-{1}'.format(kind, value)
        if kind in ('name', 'id'):
            return self.db.get(key)
        elif kind == 'links':
            return self.db.lrange(key, 0, -1)
        raise ValueError

    def is_leaf(self, value):
        return self.db.sismember('leaves', value)

    def flush(self):
        self.db.flushall()

    def __len__(self):
        return self.db.dbsize()


# 5706070
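A minimal usage sketch for GraphStorage above, assuming a local Redis server already populated via add_nodes from the expected links.txt/titles.txt files (note the snippet uses Python 2 constructs such as xrange and list-returning map); the node id is illustrative.

graph = GraphStorage(link_file='links.txt')
title = graph.get('name', 42)        # page title appended under key name-42
neighbors = graph.get('links', 42)   # outgoing link ids stored in list links-42
is_dead_end = graph.is_leaf(42)      # membership test against the 'leaves' set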
예제 #58
0
파일: caching.py 프로젝트: n040661/sqlrest
class RedisCache(AbstractCache, Loggable):
  """A cache backed by Redis

  Use as a dictionary,

  >>> cache = RedisCache(host="localhost", port=6379)
  >>> cache['hello'] = 'world'
  >>> cache['hello']            # 'world'
  >>> 'hello' in cache          # True
  >>> 'goodbye' in cache        # False

  or as a function memoizer,

  >>> @cache.memoize
  >>> def hello(name):
  ...   return "Hello, " + name

  Parameters
  ----------
  same as `redis.StrictRedis`
  """
  def __init__(self, *args, **kwargs):
    AbstractCache.__init__(self, kwargs.get('timeout', datetime.timedelta(days=1)))
    Loggable.__init__(self)

    if 'timeout' in kwargs:
      del kwargs['timeout']
    self.redis = StrictRedis(*args, **kwargs)

  def get(self, key):
    # value will be None if key is missing, but this is ambiguous
    value = self.redis.get(key)
    if not self.redis.exists(key):
      raise KeyError()
    else:
      return pickle.loads(value)

  def set(self, key, value, timeout=None):
    self.redis.set(key, pickle.dumps(value))
    # timedelta(seconds=None) would raise, so only wrap timeout when it is given
    expiry = datetime.timedelta(seconds=timeout) if timeout is not None else self.timeout
    self.redis.expire(key, expiry)
예제 #59
0
def login_fun():
    data = request.json

    response = make_response()
    response.content_type = 'application/json'
    status = login_sql(data['username'], data['password'])
    if status == 1:
        # Fetch the SECRET_KEY stored in Redis
        redis = StrictRedis(host='localhost',
                            port=6379,
                            db=0,
                            password='******')
        secret_key = redis.get('SECRET_KEY')
        expiration = 3600
        s = Serializer(secret_key, expires_in=expiration)  # expiration is the token lifetime in seconds
        token = s.dumps({'username': data['username']})
        token = str(token, 'utf-8')
        redis.set(data['username'], token)
        redis.expire(data['username'], 3600)
        post_data = {
            'info': '登录成功',  # login successful
            'token': token,
            'username': data['username']
        }
        response = make_response(json.dumps(post_data))
        response.content_type = 'application/json'
        response.status_code = 200
        return response
    elif status == 0:
        post_data = {'info': '密码错误'}  # wrong password
        response = make_response(json.dumps(post_data))
        response.content_type = 'application/json'
        response.status_code = 401
        return response
    else:
        post_data = {"info": "此用户不存在"}
        response = make_response(json.dumps(post_data))
        response.content_type = 'application/json'
        response.status_code = 403
    return response
예제 #60
0
class ExpiringDatasetCache(object):
    """
    Cache with expiring values to keep track of recently created replicas.
    """
    def __init__(self, redis_host, redis_port, timeout=1, prefix='expiring_did_cache'):
        self._redis = StrictRedis(host=redis_host, port=redis_port)
        self._prefix = prefix + '_' + str(uuid4()).split('-')[0]
        self._timeout = timeout

    def add_dataset(self, dataset):
        """ Adds a datasets to cache with lifetime """
        key = ':'.join((self._prefix, dataset))
        self._redis.set(key, 1)
        self._redis.expire(key, self._timeout)

    def check_dataset(self, dataset):
        """ Checks if dataset is still in cache """
        key = ':'.join((self._prefix, dataset))
        if self._redis.get(key) is None:
            return False

        return True
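A minimal usage sketch, assuming a reachable Redis host; note that each instance gets its own random key prefix, so entries are only visible to the instance that added them. The dataset names are illustrative.

cache = ExpiringDatasetCache('localhost', 6379, timeout=600)
cache.add_dataset('scope:name.of.dataset')
cache.check_dataset('scope:name.of.dataset')   # True until the 600 s TTL elapses
cache.check_dataset('scope:other.dataset')     # False, never added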