class MemcachedCache(CacheBase):
    """Cache backend storing password hashes and group sets in memcached.

    Relies on the CacheBase base class for `prefix`, `hash` and `expire`.
    """

    def __init__(self, config, section):
        # Local import so memcached support stays optional.
        from memcache import Client
        # 'memcache-server' is a whitespace-separated list of host:port entries.
        servers = config.get(section, 'memcache-server').split()
        self.conn = Client(servers)

    def key(self, raw):
        # md5-hash the raw key so it is safe for memcached (no spaces,
        # bounded length).  Python 3 needs str keys built from utf-8 bytes;
        # Python 2 needs a byte string.
        if sys.version_info >= (3, ):
            return self.prefix(hashlib.md5(bytes(raw, 'utf-8')).hexdigest())
        else:
            return self.prefix(hashlib.md5(raw).hexdigest()).encode('utf-8')

    def check_password(self, user, password):
        # Returns None on cache miss, True/False on hit.
        cached = self.conn.get(self.key('%s-pass' % user))
        if cached is None:
            return cached
        # Re-hash the candidate password with the cached hash as salt source.
        return cached == self.hash(password, cached)

    def set_password(self, user, password):
        self.conn.set(self.key('%s-pass' % user), self.hash(password, None), self.expire)

    def in_groups(self, user, groups):
        # Returns None on cache miss; otherwise True when the cached group
        # set shares at least one member with `groups`.
        cached = self.conn.get(self.key('%s-groups' % user))
        if cached is None:
            return None
        return not cached.isdisjoint(groups)

    def set_groups(self, user, groups):
        self.conn.set(self.key('%s-groups' % user), groups, self.expire)
class MemcacheStore(DictProxy):
    """DictProxy backed by a memcached server, keyed by process id."""

    def __init__(self, host, port):
        # Local import so memcached support stays optional.
        from memcache import Client
        self.mc = Client(['%s:%s' % (host, port)])

    def update(self, updates):
        """Store a sequence of (processId, data) pairs."""
        for process_id, data in updates:
            self.mc.set(process_id, data)

    def get(self, processId, default=None):
        """Return the stored data for processId, or `default` on a miss."""
        data = self.mc.get(processId)
        if data is None:
            return default
        return data

    def pop(self, processId):
        """Remove and return the stored data (None if absent)."""
        data = self.mc.get(processId)
        self.mc.delete(processId)
        # BUG FIX: removed unreachable code after this return that
        # referenced an undefined name `default`.
        return data

    def __len__(self):
        # curr_items statistic from the first configured server.
        return int(self.mc.get_stats()[0][1].get('curr_items'))
class SocketHandler(websocket.WebSocketHandler):
    """WebSocket handler pushing base64-encoded images read from memcached."""

    # Shared map of open connections to their per-client state.
    # NOTE(review): defaultdict() with no factory behaves like a plain dict
    # (missing keys still raise KeyError) — presumably dict() was intended;
    # confirm before relying on default behavior.
    client_infos = defaultdict()

    def __init__(self, *args, **kwargs):
        super(SocketHandler, self).__init__(*args, **kwargs)
        servers = ['127.0.0.1:11211']
        self._mc = Client(servers)

    def open(self):
        # Register fresh per-connection state.
        self.client_infos[self] = ClientInfo()

    def on_close(self):
        del self.client_infos[self]

    def on_message(self, message):
        now = datetime.now()
        delta = now - self.client_infos[self].prev_message_time
        # TODO: Restrict access based on delta.
        time = self._mc.get('time')
        if self.client_infos[self].prev_image_time == time:
            # Image unchanged since the last push: tell client to retry later.
            self.write_message({'timeout': 50})
        else:
            image = self._mc.get('image')
            image = base64.b64encode(image)
            self.write_message({'image': image, 'timeout': 80})
            self.client_infos[self].prev_image_time = time
        self.client_infos[self].prev_message_time = now

    def check_origin(self, origin):
        # Accept connections from any origin.
        return True
class MemcachedCacheStorage:
    """Scrapy HTTP-cache storage backend persisting responses in memcached."""

    def __init__(self, settings):
        # TTL for cached entries, in seconds.
        self.expiration_secs = settings.getint("HTTPCACHE_EXPIRATION_SECS")
        self.client = Client((settings["MEMCACHED_LOCATION"], ))

    def key_for(self, spider, request, type_):
        """Creates unique identifiers to be used as cache keys in Memcached."""
        key = request_fingerprint(request)
        return "{}-{}-{}".format(spider.name, key, type_)

    def store_response(self, spider, request, response):
        """Store the given response in the cache."""
        meta_data = {
            "url": request.url,
            "method": request.method,
            "status": response.status,
            "response_url": response.url,
            "timestamp": time(),
        }
        # Each component is stored under its own key so the parts can be
        # fetched independently on retrieval.
        data = (
            ("meta_data", json.dumps(meta_data)),
            ("response_headers", headers_dict_to_raw(response.headers)),
            ("response_body", response.body),
            ("request_headers", headers_dict_to_raw(request.headers)),
            ("request_body", request.body),
        )
        for type_, value in data:
            key = self.key_for(spider, request, type_)
            self.client.set(key, value, self.expiration_secs)

    def retrieve_response(self, spider, request):
        """Return response if present in cache, or None otherwise."""
        meta_data_key = self.key_for(spider, request, "meta_data")
        raw_meta_data = self.client.get(meta_data_key)
        meta_data = json.loads(raw_meta_data) if raw_meta_data else None
        if not meta_data:
            return
        body, raw_headers = (self.client.get(
            self.key_for(spider, request, type_))
            for type_ in ("response_body", "response_headers"))
        if not all((body, raw_headers)):
            # Incomplete entry (e.g. partially expired): treat as a miss.
            return
        kwargs = {
            "url": meta_data.get("response_url"),
            "headers": Headers(headers_raw_to_dict(raw_headers)),
        }
        # Pick the response class from url/headers before adding body/status.
        response_class = responsetypes.from_args(**kwargs)
        kwargs.update({"status": meta_data.get("status"), "body": body})
        return response_class(**kwargs)

    def open_spider(self, spider):
        logger.debug("Using memcached cache in", extra={"spider": spider})

    def close_spider(self, spider):
        pass
class TestCacheDecoratorWithMemcache(unittest.TestCase):
    """Integration tests for the cache decorator against a live memcached."""

    def setUp(self):
        server = ["127.0.0.1:11211"]
        self.cache = Client(server)
        # Default user id used by most tests; cleaned up in tearDown.
        self.user_id = 5

    def test_cache_is_empty(self):
        value = self.cache.get(str(self.user_id))
        self.assertTrue(value is None)

    def test_get_long_response_set_value_to_cache(self):
        cached, result = get_long_response(self.user_id)
        self.assertTrue(cached is False)
        self.assertTrue(result == self.cache.get(str(self.user_id)))

    def test_get_long_response_gets_value_from_cache(self):
        # Second call must be served from cache with the same result.
        cached_first_call, result_first_call = get_long_response(self.user_id)
        cached_second_call, result_second_call = get_long_response(
            self.user_id)
        self.assertTrue(cached_second_call is True)
        self.assertTrue(result_second_call == result_first_call)

    def test_get_long_response_uses_user_id_as_unique_key(self):
        another_user_id = 7
        cached_user1_1, result_user1_1 = get_long_response(self.user_id)
        cached_user1_2, result_user1_2 = get_long_response(self.user_id)
        cached_user2_1, result_user2_1 = get_long_response(another_user_id)
        cached_user2_2, result_user2_2 = get_long_response(another_user_id)
        self.assertTrue(cached_user1_2 is True)
        self.assertTrue(cached_user2_2 is True)
        self.assertTrue(result_user1_1 == result_user1_2)
        self.assertTrue(result_user2_1 == result_user2_2)
        self.assertFalse(result_user2_2 == result_user1_2)
        # remove entry in memcached.
        self.cache.delete(str(7))

    def test_delete_entry_in_cache_from_outside(self):
        cached, result = get_long_response(self.user_id)
        self.assertTrue(cached is False)
        value_in_cache = self.cache.get(str(self.user_id))
        self.assertTrue(result == value_in_cache)
        self.cache.delete(str(self.user_id))
        self.assertTrue(self.cache.get(str(self.user_id)) is None)

    def tearDown(self):
        # Clean up the default user's entry and drop connections.
        self.cache.delete(str(self.user_id))
        self.cache.disconnect_all()
def whereyoulive_sum():
    """Render an HTML table of per-address counters stored in memcached."""
    global addresses
    ret = "<table class=\"normal\">"
    ret += "<th class=\"normal\">" + "Address" + "</th>"
    ret += "<th class=\"normal\">" + "Total/Address" + "</th>"
    conn = Client([gethostname() + ':11211'])
    vals = []
    for k in addresses:
        ret += "<tr class=\"normal\">"
        ret += "<td class=\"normal\">" + k + "</td>"
        val = conn.get(addr_prefix + k)
        # A missing counter renders as zero.
        if val is None:
            vals.append(0)
        else:
            vals.append(val)
        ret += "<td class=\"normal\"><span>" + str(vals[-1]) + "</span></td>"
        ret += "</tr>"
    ret += "<tr align=\"center\"><td class=\"normal\">" + "Sum" + "</td>"
    if not vals:
        ret += "<td class=\"normal\">" + "0" + "</td></tr>"
    else:
        # sum() replaces reduce(lambda i, j: i+j, ...): identical result,
        # and reduce is not a builtin on Python 3.
        ret += "<td class=\"normal\">" + str(sum(vals)) + "</td></tr>"
    ret += "</table>"
    return ret
class MemcachedZipState(object):
    """Get or set the state of a zip job, shared via a memcached server."""

    def __init__(self, uid):
        self._uid = uid
        if ZIPSTATE_MEMCACHEDSERVER not in os.environ:
            # BUG FIX: the original message had a stray colon in place of the
            # closing quote ('"{0}:') and broken grammar.
            raise ValueError(
                'Expect environment variable "{0}" pointing to a memcached '
                'server in order to share state '
                'information.'.format(ZIPSTATE_MEMCACHEDSERVER)
            )
        self._client = Client([os.environ[ZIPSTATE_MEMCACHEDSERVER]])

    def _combined_key(self, key):
        # Namespace every key with this job's uid.
        return '{0}-{1}'.format(self._uid, key)

    def __getitem__(self, key):
        """Get state of the zip job."""
        return self._client.get(self._combined_key(key))

    def __setitem__(self, key, value):
        """Set state of the zip job."""
        return self._client.set(self._combined_key(key), value)
class MemcachedAdapter(OWMCache): """ Needs: python-memcached bindings (https://pypi.python.org/pypi/python-memcached) """ # Defaults: # cache item expiration time in seconds __ITEM_LIFETIME_SECONDS = 4000 # Ten minutes # hostname and port __HOSTNAME = "127.0.0.1" __PORT = "11211" def __init__(self, hostname=__HOSTNAME, port=__PORT, item_lifetime=__ITEM_LIFETIME_SECONDS): from memcache import Client self._memcached = Client([hostname + ":" + port]) self._item_lifetime = item_lifetime def get(self, request_url): return self._memcached.get(request_url) def set(self, request_url, response_json): self._memcached.set(request_url, response_json, self._item_lifetime)
class OODictMongoMemcache(OODictMongo):
    """Add memcache caching capabilities to a OODictMongo."""

    def __init__(self, memcache_host, mongo_uri, mongo_db, mongo_coll,
                 mongo_id="_id", mongo_client=None, memcache_client=None,
                 _dict=None):
        super(OODictMongoMemcache, self).__init__(mongo_uri, mongo_db,
                                                  mongo_coll, mongo_id,
                                                  mongo_client, _dict)
        self._memcache_host = memcache_host
        ## self._memcache_lock = memache_lock
        # Reuse an injected client (e.g. for tests); otherwise connect.
        if memcache_client is None:
            self._memcache = MemcacheClient(memcache_host)
        else:
            self._memcache = memcache_client

    def _memcache_key(self, mongo_id=None):
        # Key layout: "<db>:<coll>:<id>"; defaults to this object's own id.
        return str("%s:%s:%s" % (self._mongo_db, self._mongo_coll,
                                 mongo_id or self._dict.get(self._mongo_id, '')))

    def get_from_field(self, key, value, flush=False):
        """Get user by a key:value pair from mongo or memcache."""
        # if searching by id key, then we can find it in memcache
        if not flush and key == self._mongo_id:
            item = self._memcache.get(self._memcache_key(value))
            if item:
                log.info("Cache hit.")
                return self._reinit(item)
            log.info("Cache miss.")
        # didn't find it in memcache, search in mongo and update cache
        super(OODictMongoMemcache, self).get_from_field(key, value)
        item = self._dict
        if item:
            self._memcache.set(self._memcache_key(), item)
        return self._reinit(item)

    def refresh(self, flush=False):
        """Refresh self data from memcache.

        If flush is True, then flush memcache entry and force a refresh
        from mongo.
        """
        self.get_from_field(self._mongo_id, self._dict[self._mongo_id], flush)

    def save(self):
        """Save user data to storage."""
        # Write-through: update the cache first, then persist via mongo.
        self._memcache.set(self._memcache_key(), self._dict)
        super(OODictMongoMemcache, self).save()

    def delete(self):
        """Delete user from storage."""
        self._memcache.delete(self._memcache_key())
        super(OODictMongoMemcache, self).delete()
class FoursquareAPI:
    """Foursquare v2 API client with memcached-backed response caching."""

    url_base = "https://api.foursquare.com/v2/%s"
    payload = {}
    mc = False
    mcprefix = False

    def __init__(self, lifestream):
        OAUTH_FILENAME = lifestream.config.get("foursquare", "secrets_file")
        CONSUMER_KEY = lifestream.config.get("foursquare", "client_id")
        CONSUMER_SECRET = lifestream.config.get("foursquare", "secret")
        MEMCACHE_HOST = lifestream.config.get("memcache", "host")
        MEMCACHE_PORT = lifestream.config.get("memcache", "port")
        self.mcprefix = lifestream.config.get("memcache", "prefix")
        servers = ["%s:%s" % (MEMCACHE_HOST, MEMCACHE_PORT)]
        self.mc = Client(servers, debug=1)
        if not os.path.exists(OAUTH_FILENAME):
            logger.error("No OAUTH found at %s" % OAUTH_FILENAME)
            raise Exception("You need to run foursquare_oauth.py to generate the oauth key")
        oauth_token, oauth_token_secret = read_token_file(OAUTH_FILENAME)
        self.payload = {
            'v': "20170801",
            'oauth_token': oauth_token
        }

    def cache_get(self, url, params):
        """GET `url` with `params`, caching the decoded JSON in memcached.

        Returns the decoded JSON response (from cache when available).
        """
        m = hashlib.sha224()
        # encode() keeps hashing working on Python 3, where hash objects
        # require bytes; on Python 2 the digest is unchanged for ascii URLs.
        m.update(url.encode('utf-8'))
        m.update(str(params).encode('utf-8'))
        key = m.hexdigest()
        res = self.mc.get(key)
        if res:
            return json.loads(res)
        # BUG FIX: the request previously ignored `url` and `params` and
        # always fetched users/self/checkins, so every cached endpoint
        # (e.g. venues/search) silently returned check-in data.
        r = requests.get(url, params=params)
        self.mc.set(key, json.dumps(r.json()))
        return r.json()

    def my_checkins(self):
        """Return the authenticated user's check-ins."""
        return self.cache_get(self.url_base % "users/self/checkins",
                              params=self.payload)

    def search_near(self, lat, lng, intent="checkin", radius=50, limit=10):
        """Search venues near (lat, lng).

        NOTE(review): this mutates the shared self.payload dict in place,
        matching the original behavior.
        """
        payload = self.payload
        payload['ll'] = "%s,%s" % (lat, lng)
        payload['intent'] = intent
        payload['radius'] = radius
        payload['limit'] = limit
        return self.cache_get(self.url_base % "venues/search", params=payload)
class ResetCodeMemcache(ResetCode):
    """ Implements the reset code methods for auth backends. """

    def __init__(self, product='auth', nodes=None, debug=0,
                 expiration=_6HOURS, **kw):
        if nodes is None:
            nodes = ['127.0.0.1:11211']
        self._engine = Client(nodes, debug)
        self.product = product
        # Reset codes expire after this many seconds.
        self.expiration = expiration

    #
    # Private methods
    #
    def _get_reset_code(self, user_id):
        return self._engine.get(self._generate_key(user_id))

    def _generate_key(self, user_id):
        # Key layout: "reset:<user_id>:<product>".
        return "reset:%s:%s" % (user_id, self.product)

    def _set_reset_code(self, user_id):
        code = self._generate_reset_code()
        key = self._generate_key(user_id)
        # memcache set() returns falsy on failure; surface as backend error.
        if not self._engine.set(key, code, self.expiration):
            raise BackendError()
        return code

    #
    # Public methods
    #
    def generate_reset_code(self, user, overwrite=False):
        user_id = self._get_user_id(user)
        if not overwrite:
            # Reuse an existing, still-valid code unless told to overwrite.
            stored_code = self._get_reset_code(user_id)
            if stored_code is not None:
                return stored_code
        return self._set_reset_code(user_id)

    def verify_reset_code(self, user, code):
        user_id = self._get_user_id(user)
        # Cheap format validation before hitting the backend.
        if not self._check_reset_code(code):
            return False
        stored_code = self._get_reset_code(user_id)
        if stored_code is None:
            return False
        return stored_code == code

    def clear_reset_code(self, user):
        user_id = self._get_user_id(user)
        return self._engine.delete(self._generate_key(user_id))
class MemCacheWrapper(object):
    """ Memcache client wrapper. No exception raise and add some useful function. """

    def __init__(self, servers, logerr=None):
        self.cache = MCClient(servers=servers, debug=False)
        # NOTE(review): logerr is stored but never used by these methods.
        self.logerr = logerr

    def add(self, key, val=1, time=0):
        """Add `key` only if absent; returns None on any client error."""
        try:
            return self.cache.add(key, val, time)
        except Exception as e:
            _logger.warning("Exception during `add`: %s", e)
            return None

    def count(self, key, expires=0, delta=1):
        """Atomically increment `key` by `delta`, creating it on first use."""
        try:
            result = self.cache.incr(key, delta)
            if result is None:
                # Key absent: try to create it; if another client created it
                # concurrently (add fails), retry the increment.
                if not self.cache.add(key, delta, expires):
                    result = self.cache.incr(key, delta)
                else:
                    return delta
            return result
        except Exception as e:
            _logger.warning("Exception during `count`: %s", e)
            return None

    def get(self, key):
        """Fetch `key`; returns None on miss or error."""
        result = None
        try:
            result = self.cache.get(str(key))
        except Exception as e:
            _logger.warning("Exception during `get`: %s", e)
        return result

    def set(self, key, value, expires):
        """Store `value` under `key`; returns False on error."""
        result = False
        try:
            result = self.cache.set(str(key), value, expires)
        except Exception as e:
            _logger.warning("Exception during `set`: %s", e)
        return result

    def delete(self, key):
        """Delete `key`; returns False on error."""
        result = False
        try:
            result = self.cache.delete(key)
        except Exception as e:
            _logger.warning("Exception during `del`: %s", e)
        return result
def read(self, layer, coord, format):
    """Fetch the cached tile for (layer, coord, format), or None on a miss."""
    client = Client(self.servers)
    cache_key = tile_key(layer, coord, format, self.revision, self.key_prefix)
    cached = client.get(cache_key)
    client.disconnect_all()
    return cached
class OODictMongoMemcache(OODictMongo):
    """Add memcache caching capabilities to a OODictMongo."""

    def __init__(self, memcache_host, mongo_uri, mongo_db, mongo_coll,
                 mongo_id="_id", mongo_client=None, memcache_client=None,
                 _dict=None):
        super(OODictMongoMemcache, self).__init__(
            mongo_uri, mongo_db, mongo_coll, mongo_id, mongo_client, _dict
        )
        self._memcache_host = memcache_host
        ## self._memcache_lock = memache_lock
        # Reuse an injected client (e.g. for tests); otherwise connect.
        if memcache_client is None:
            self._memcache = MemcacheClient(memcache_host)
        else:
            self._memcache = memcache_client

    def _memcache_key(self, mongo_id=None):
        # Key layout: "<db>:<coll>:<id>"; defaults to this object's own id.
        return str("%s:%s:%s" % (self._mongo_db, self._mongo_coll,
                                 mongo_id or self._dict.get(self._mongo_id, '')))

    def get_from_field(self, key, value, flush=False):
        """Get user by a key:value pair from mongo or memcache."""
        # if searching by id key, then we can find it in memcache
        if not flush and key == self._mongo_id:
            item = self._memcache.get(self._memcache_key(value))
            if item:
                log.info("Cache hit.")
                return self._reinit(item)
            log.info("Cache miss.")
        # didn't find it in memcache, search in mongo and update cache
        super(OODictMongoMemcache, self).get_from_field(key, value)
        item = self._dict
        if item:
            self._memcache.set(self._memcache_key(), item)
        return self._reinit(item)

    def refresh(self, flush=False):
        """Refresh self data from memcache.

        If flush is True, then flush memcache entry and force a refresh
        from mongo.
        """
        self.get_from_field(self._mongo_id, self._dict[self._mongo_id], flush)

    def save(self):
        """Save user data to storage."""
        # Write-through: update the cache first, then persist via mongo.
        self._memcache.set(self._memcache_key(), self._dict)
        super(OODictMongoMemcache, self).save()

    def delete(self):
        """Delete user from storage."""
        self._memcache.delete(self._memcache_key())
        super(OODictMongoMemcache, self).delete()
def whereyoulive(addr):
    """Increment the memcached counter for `addr` and register the address."""
    conn = Client([gethostname() + ':11211'])
    # incr only works on existing keys, so initialise on first sighting.
    if conn.get(addr_prefix + addr) is not None:
        conn.incr(addr_prefix + addr)
    else:
        conn.set(addr_prefix + addr, 1)
    # `in` replaces the explicit __contains__ dunder call.
    if addr not in addresses:
        addresses.append(addr)
def whereyoulive(addr):
    """Increment the memcached counter for `addr` and register the address."""
    conn = Client([gethostname()+':11211'])
    # incr only works on existing keys, so initialise on first sighting.
    if conn.get(addr_prefix+addr) is not None:
        conn.incr(addr_prefix+addr)
    else:
        conn.set(addr_prefix+addr, 1)
    # `in` replaces the explicit __contains__ dunder call.
    if addr not in addresses:
        addresses.append(addr)
def read(self, layer, coord, format):
    """Fetch and base64-decode the cached tile, or None on a miss."""
    client = Client(self.servers)
    cache_key = tile_key(layer, coord, format, self.revision, self.key_prefix)
    cached = client.get(cache_key)
    client.disconnect_all()
    if cached is None:
        return None
    # Values are stored base64-encoded; decode back to raw bytes.
    return b64decode(cached.encode('ascii'))
class MemcachedManager():
    """Memcached accessor namespacing keys as PRODUCT_PREFIX + parts joined by '|'."""

    class Prefix():
        # Well-known key prefixes used by callers.
        Session = 'SESSION'
        SysCache = 'ORDER_SYS_CACHE'

    def __init__(self):
        self.conn = Client(settings.CACHE['clients'])

    def _key(self, arg):
        # Build the namespaced key from arbitrary (stringified) parts.
        return PRODUCT_PREFIX + '|'.join(str(i) for i in arg)

    def get(self, *arg):
        """Return the cached value for the key parts, or None on miss/error."""
        if not arg:
            return None
        try:
            return self.conn.get(self._key(arg))
        except Exception:  # narrowed from bare except; e.g. MemcachedKeyNoneError
            return None

    def set(self, value, timeout, *arg):
        """Cache `value` for `timeout` seconds under the key parts."""
        if not arg:
            return None
        self.conn.set(self._key(arg), value, timeout)

    def delete(self, *arg):
        """Best-effort delete of the key; errors are swallowed."""
        if not arg:
            return None
        try:
            self.conn.delete(self._key(arg))
        except Exception:  # narrowed from bare except
            return None
class SessionHandler:
    """Session registry backed by memcached with a one-hour TTL."""

    # Most recently constructed instance, for global access.
    inst = None

    def __init__(self):
        SessionHandler.inst = self
        self.conn = Client(["127.0.0.1:11211"])

    def setuid(self, uid):
        """Create a session for `uid` and return its key."""
        # BUG FIX: uuid.UUID has no get_hex() method; .hex is the 32-char
        # hex string on both Python 2 and Python 3.
        key = uuid4().hex
        self.conn.set(key, uid, 3600)
        return key

    def getuid(self, key):
        """Return the uid for a session key, or None if absent/expired."""
        return self.conn.get(key)

    def deluid(self, key):
        """Drop the session for `key`."""
        self.conn.delete(key)
def test_memcached_connection(self):
    """Smoke-test a set/get round trip through memcached (python-memcached)."""
    # python-memcached
    print('MEMCACHED_PORT_11211_TCP_ADDR')
    print(os.environ.get('MEMCACHED_PORT_11211_TCP_ADDR'))
    print('MEMCACHED_PORT_11211_TCP_PORT')
    print(os.environ.get('MEMCACHED_PORT_11211_TCP_PORT'))
    # NOTE(review): only the address is passed; the client then uses the
    # default memcached port — confirm that is intended.
    mc = Client([
        os.environ.get('MEMCACHED_PORT_11211_TCP_ADDR')
    ], debug=1)
    mc.set('a_string', 'some random string')
    newval = mc.get('a_string')
    self.assertEqual(newval, 'some random string')
    mc.disconnect_all()
class CacheMgr(object):
    """Minimal wrapper around a memcached client."""

    def __init__(self, servers=None):
        # Avoid a mutable default argument; fall back to the local daemon.
        # (Passing a list explicitly still works, as before.)
        if servers is None:
            servers = ["127.0.0.1:11211"]
        self.__mc = Client(servers)

    def tearDown(self):
        """Flush all cached data and close connections."""
        self.__mc.flush_all()
        self.__mc.disconnect_all()

    def get(self, key):
        """Return the cached value for `key` (None on miss)."""
        return self.__mc.get(key)

    def set(self, key, val):
        # TODO: noreply setting
        self.__mc.set(key, val)
def test_memcache_load(caplog):
    """End-to-end check that `main` loads the sample data into memcache."""
    caplog.set_level(logging.INFO)
    # Load the data into memcache.
    main(Options)
    # Rename the test file back.
    modifed = os.path.join(TESTDIR, 'data', '.sample.tsv.gz')
    os.rename(modifed, Options.pattern)
    # Verify that every record made it into memcache.
    client = Client(['127.0.0.1:11211'])
    with gzip.open(Options.pattern) as fd:
        for line in fd:
            app = parse_appsinstalled(line)
            key = "%s:%s" % (app.dev_type, app.dev_id)
            # BUG FIX: the old code asserted on repr(client.get(key)),
            # which is the string "None" for a miss and therefore never
            # falsy — the assertion could not fail.
            assert client.get(key) is not None
class Store(object):
    """Tab-separated multi-value store layered on a memcached client."""

    def __init__(self, host='127.0.0.1', port=11211):
        address = '%s:%d' % (host, port)
        self.client = Client([address, ],
                             pickler=JSONPickler,
                             unpickler=JSONUnpickler)

    def get(self, key):
        """Return the values under `key` as a list (miss value passed through)."""
        raw = self.client.get(key)
        if not raw:
            return raw
        return raw.split('\t')

    def set(self, key, val):
        """Append `val` under `key`, creating the entry on first use."""
        created = self.client.add(key, val)
        if created:
            return created
        # Key already exists: append the value tab-separated.
        return self.client.append(key, '\t%s' % val)
class Memcache(object):
    """Minimal memcached wrapper for a single "host:port" address.

    TODO: no automatic reconnection after the connection drops.
    """

    def __init__(self, address):
        from memcache import Client
        self.conn = Client([address])

    def get(self, key):
        """Return the cached value for `key` (None on miss)."""
        return self.conn.get(key)

    def set(self, key, value, timeout):
        """Store `value` under `key` for `timeout` seconds."""
        self.conn.set(key, value, timeout)

    def delete(self, key):
        """Remove `key` from the cache."""
        self.conn.delete(key)
class MemcacheMemory(Memory):
    """ Memory gateway to a Memcache server """

    def __init__(self, servers=["127.0.0.1:11211"], expire=0, debug=False):
        """
        :param servers: List of servers to use. Please, read memcache.Client help.
        """
        self._client = MemcacheClient(servers)
        self._expire = expire
        logging.basicConfig(level=logging.WARNING)
        self.log = logging.getLogger("Memcache-Gateway")
        if debug:
            self.log.setLevel(logging.DEBUG)

    def __getitem__(self, key):
        """Return the stored value; raise KeyError when the key is absent."""
        self.log.debug("Accessing key %s", key)
        value = self._client.get(key)
        # NotSet is the stored marker representing an explicit None value,
        # so a real None from the client means "missing".
        if isinstance(value, NotSet):
            value = None
        elif value is None:
            raise KeyError
        return value

    def __setitem__(self, key, value):
        self.log.debug("Setting key")
        # None cannot be distinguished from a miss in memcache, so an
        # explicit None is stored as a NotSet marker instead.
        if value is None:
            value = NotSet()
        self._client.set(key, value, self._expire)

    def __delitem__(self, key):
        self.log.debug("Deleting key %s", key)
        # memcache delete returns 0 on failure / missing key.
        if self._client.delete(key) == 0:
            raise KeyError

    def open(self, key):
        """Return a KeyFile view over `key`."""
        return KeyFile(self, key)
class MemcachedCacheStore(AbstractCacheStore):
    """Cache store backed by a memcached server (python-memcached)."""

    # BUG FIX: ("127.0.0.1:11211") is just a parenthesized string, not a
    # tuple — python-memcached iterates the servers argument, so it would
    # treat each character as a server address. The trailing comma makes
    # it a real one-element tuple.
    servers = ("127.0.0.1:11211",)

    def __init__(self, servers=None, debug=False):
        if servers is None:
            servers = self.servers
        from memcache import Client as MemcachedClient
        self._client = MemcachedClient(servers, debug)

    def set(self, key, val, time=0):
        self._client.set(key, val, time)

    def add(self, key, val, time=0):
        """Store only if `key` is absent; raise Error when it already exists."""
        res = self._client.add(key, val, time)
        if not res:
            raise Error("a value for key %r is already in the cache" % key)
        # NOTE(review): self._data is not defined in this class — presumably
        # provided by AbstractCacheStore; verify, otherwise this line raises
        # AttributeError.
        self._data[key] = (val, time)

    def replace(self, key, val, time=0):
        """Store only if `key` exists; raise Error otherwise."""
        res = self._client.replace(key, val, time)
        if not res:
            raise Error("a value for key %r is already in the cache" % key)
        self._data[key] = (val, time)

    def delete(self, key):
        res = self._client.delete(key, time=0)
        if not res:
            raise KeyError(key)

    def get(self, key):
        val = self._client.get(key)
        if val is None:
            raise KeyError(key)
        return val

    def clear(self):
        self._client.flush_all()
class MemcachedCacheStore(AbstractCacheStore):
    """Cache store backed by a memcached server (python-memcached)."""

    # BUG FIX: ('127.0.0.1:11211') is just a parenthesized string, not a
    # tuple — python-memcached iterates the servers argument, so it would
    # treat each character as a server address. The trailing comma makes
    # it a real one-element tuple.
    servers = ('127.0.0.1:11211',)

    def __init__(self, servers=None, debug=False):
        if servers is None:
            servers = self.servers
        from memcache import Client as MemcachedClient
        self._client = MemcachedClient(servers, debug)

    def set(self, key, val, time=0):
        self._client.set(key, val, time)

    def add(self, key, val, time=0):
        """Store only if `key` is absent; raise Error when it already exists."""
        res = self._client.add(key, val, time)
        if not res:
            raise Error('a value for key %r is already in the cache' % key)
        # NOTE(review): self._data is not defined in this class — presumably
        # provided by AbstractCacheStore; verify, otherwise this line raises
        # AttributeError.
        self._data[key] = (val, time)

    def replace(self, key, val, time=0):
        """Store only if `key` exists; raise Error otherwise."""
        res = self._client.replace(key, val, time)
        if not res:
            raise Error('a value for key %r is already in the cache' % key)
        self._data[key] = (val, time)

    def delete(self, key):
        res = self._client.delete(key, time=0)
        if not res:
            raise KeyError(key)

    def get(self, key):
        val = self._client.get(key)
        if val is None:
            raise KeyError(key)
        else:
            return val

    def clear(self):
        self._client.flush_all()
def whereyoulive_sum():
    """Render an HTML table of per-address counters stored in memcached."""
    global addresses
    ret = "<table class=\"normal\">"
    ret += "<th class=\"normal\">" + "Address" + "</th>"
    ret += "<th class=\"normal\">" + "Total/Address" + "</th>"
    conn = Client([gethostname() + ':11211'])
    vals = []
    for k in addresses:
        ret += "<tr class=\"normal\">"
        ret += "<td class=\"normal\">" + k + "</td>"
        val = conn.get(addr_prefix + k)
        # A missing counter renders as zero.
        if val is None:
            vals.append(0)
        else:
            vals.append(val)
        ret += "<td class=\"normal\"><span>" + str(vals[-1]) + "</span></td>"
        ret += "</tr>"
    ret += "<tr align=\"center\"><td class=\"normal\">" + "Sum" + "</td>"
    if not vals:
        ret += "<td class=\"normal\">" + "0" + "</td></tr>"
    else:
        # sum() replaces reduce(lambda i, j: i + j, ...): identical result,
        # and reduce is not a builtin on Python 3.
        ret += "<td class=\"normal\">" + str(sum(vals)) + "</td></tr>"
    ret += "</table>"
    return ret
class MemcachedAdapter(OWMCache):
    """
    Needs: python-memcached bindings
    (https://pypi.python.org/pypi/python-memcached)
    """
    # Defaults:
    # cache item expiration time in seconds
    __ITEM_LIFETIME_SECONDS = 60*10  # Ten minutes
    # hostname and port
    __HOSTNAME = "127.0.0.1"
    __PORT = "11211"

    def __init__(self, hostname=__HOSTNAME, port=__PORT,
                 item_lifetime=__ITEM_LIFETIME_SECONDS):
        # Local import keeps the dependency optional until instantiation.
        from memcache import Client
        server = hostname + ":" + port
        self._memcached = Client([server])
        self._item_lifetime = item_lifetime

    def get(self, request_url):
        """Return the cached response for `request_url` (None on miss)."""
        return self._memcached.get(request_url)

    def set(self, request_url, response_json):
        """Cache `response_json` under `request_url` with the default TTL."""
        self._memcached.set(request_url, response_json, self._item_lifetime)
class TestMemcache(unittest.TestCase):
    """Integration tests for python-memcached against a local server."""

    def setUp(self):
        # TODO(): unix socket server stuff
        servers = ["127.0.0.1:11211"]
        self.mc = Client(servers, debug=1)

    def tearDown(self):
        self.mc.flush_all()
        self.mc.disconnect_all()

    def check_setget(self, key, val, noreply=False):
        # Helper: store then read back, asserting the round trip.
        self.mc.set(key, val, noreply=noreply)
        newval = self.mc.get(key)
        self.assertEqual(newval, val)

    def test_setget(self):
        self.check_setget("a_string", "some random string")
        self.check_setget("a_string_2", "some random string", noreply=True)
        self.check_setget("an_integer", 42)
        self.check_setget("an_integer_2", 42, noreply=True)

    def test_delete(self):
        self.check_setget("long", int(1 << 30))
        result = self.mc.delete("long")
        self.assertEqual(result, True)
        self.assertEqual(self.mc.get("long"), None)

    @mock.patch.object(_Host, 'send_cmd')
    @mock.patch.object(_Host, 'readline')
    def test_touch(self, mock_readline, mock_send_cmd):
        # Verify the wire command without requiring a real reply.
        with captured_stderr():
            self.mc.touch('key')
        mock_send_cmd.assert_called_with(b'touch key 0')

    def test_get_multi(self):
        self.check_setget("gm_a_string", "some random string")
        self.check_setget("gm_an_integer", 42)
        self.assertEqual(self.mc.get_multi(["gm_a_string", "gm_an_integer"]), {
            "gm_an_integer": 42,
            "gm_a_string": "some random string"
        })

    def test_get_unknown_value(self):
        self.mc.delete("unknown_value")
        self.assertEqual(self.mc.get("unknown_value"), None)

    def test_setget_foostruct(self):
        # Picklable custom objects must round-trip too.
        f = FooStruct()
        self.check_setget("foostruct", f)
        self.check_setget("foostruct_2", f, noreply=True)

    def test_incr(self):
        self.check_setget("i_an_integer", 42)
        self.assertEqual(self.mc.incr("i_an_integer", 1), 43)

    def test_incr_noreply(self):
        self.check_setget("i_an_integer_2", 42)
        # noreply returns None immediately; verify the side effect instead.
        self.assertEqual(self.mc.incr("i_an_integer_2", 1, noreply=True), None)
        self.assertEqual(self.mc.get("i_an_integer_2"), 43)

    def test_decr(self):
        self.check_setget("i_an_integer", 42)
        self.assertEqual(self.mc.decr("i_an_integer", 1), 41)

    def test_decr_noreply(self):
        self.check_setget("i_an_integer_2", 42)
        # noreply returns None immediately; verify the side effect instead.
        self.assertEqual(self.mc.decr("i_an_integer_2", 1, noreply=True), None)
        self.assertEqual(self.mc.get("i_an_integer_2"), 41)

    def test_sending_spaces(self):
        # Keys containing spaces must be rejected client-side.
        try:
            self.mc.set("this has spaces", 1)
        except Client.MemcachedKeyCharacterError as err:
            self.assertTrue("characters not allowed" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyCharacterError, nothing raised")

    def test_sending_control_characters(self):
        # Keys containing control characters must be rejected client-side.
        try:
            self.mc.set("this\x10has\x11control characters\x02", 1)
        except Client.MemcachedKeyCharacterError as err:
            self.assertTrue("characters not allowed" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyCharacterError, nothing raised")

    def test_sending_key_too_long(self):
        try:
            self.mc.set('a' * SERVER_MAX_KEY_LENGTH + 'a', 1)
        except Client.MemcachedKeyLengthError as err:
            self.assertTrue("length is >" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyLengthError, nothing raised")
        # These should work.
        self.mc.set('a' * SERVER_MAX_KEY_LENGTH, 1)
        self.mc.set('a' * SERVER_MAX_KEY_LENGTH, 1, noreply=True)

    def test_setget_boolean(self):
        """GitHub issue #75.

        Set/get with boolean values."""
        self.check_setget("bool", True)

    def test_unicode_key(self):
        s = u'\u4f1a'
        # Longest key of this multibyte character that still fits the limit.
        maxlen = SERVER_MAX_KEY_LENGTH // len(s.encode('utf-8'))
        key = s * maxlen
        self.mc.set(key, 5)
        value = self.mc.get(key)
        self.assertEqual(value, 5)

    def test_unicode_value(self):
        key = 'key'
        value = u'Iñtërnâtiônàlizætiøn2'
        self.mc.set(key, value)
        cached_value = self.mc.get(key)
        self.assertEqual(value, cached_value)

    def test_binary_string(self):
        # Compressed (binary) payloads must survive set/add/set_multi.
        value = 'value_to_be_compressed'
        compressed_value = zlib.compress(value.encode())
        self.mc.set('binary1', compressed_value)
        compressed_result = self.mc.get('binary1')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, zlib.decompress(compressed_result).decode())
        self.mc.add('binary1-add', compressed_value)
        compressed_result = self.mc.get('binary1-add')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, zlib.decompress(compressed_result).decode())
        self.mc.set_multi({'binary1-set_many': compressed_value})
        compressed_result = self.mc.get('binary1-set_many')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, zlib.decompress(compressed_result).decode())

    def test_ignore_too_large_value(self):
        # NOTE: "MemCached: while expecting[...]" is normal...
        key = 'keyhere'
        value = 'a' * (SERVER_MAX_VALUE_LENGTH // 2)
        self.assertTrue(self.mc.set(key, value))
        self.assertEqual(self.mc.get(key), value)
        value = 'a' * SERVER_MAX_VALUE_LENGTH
        with captured_stderr() as log:
            self.assertIs(self.mc.set(key, value), False)
        self.assertEqual(
            log.getvalue(),
            "MemCached: while expecting 'STORED', got unexpected response "
            "'SERVER_ERROR object too large for cache'\n")
        # This test fails if the -I option is used on the memcached server
        self.assertTrue(self.mc.get(key) is None)

    def test_get_set_multi_key_prefix(self):
        """Testing set_multi() with no memcacheds running."""
        prefix = 'pfx_'
        values = {'key1': 'a', 'key2': 'b'}
        errors = self.mc.set_multi(values, key_prefix=prefix)
        self.assertEqual(errors, [])
        keys = list(values)
        self.assertEqual(self.mc.get_multi(keys, key_prefix=prefix), values)

    def test_set_multi_dead_servers(self):
        """Testing set_multi() with no memcacheds running."""
        self.mc.disconnect_all()
        with captured_stderr() as log:
            for server in self.mc.servers:
                server.mark_dead('test')
        self.assertIn('Marking dead.', log.getvalue())
        # With every server dead, all keys must be reported as errors.
        errors = self.mc.set_multi({'key1': 'a', 'key2': 'b'})
        self.assertEqual(sorted(errors), ['key1', 'key2'])

    def test_disconnect_all_delete_multi(self):
        """Testing delete_multi() with no memcacheds running."""
        self.mc.disconnect_all()
        with captured_stderr() as output:
            ret = self.mc.delete_multi(('keyhere', 'keythere'))
        self.assertEqual(ret, 1)
        self.assertEqual(
            output.getvalue(),
            "MemCached: while expecting 'DELETED', got unexpected response "
            "'NOT_FOUND'\n"
            "MemCached: while expecting 'DELETED', got unexpected response "
            "'NOT_FOUND'\n")

    @mock.patch.object(_Host, 'send_cmd')  # Don't send any commands.
    @mock.patch.object(_Host, 'readline')
    def test_touch_unexpected_reply(self, mock_readline, mock_send_cmd):
        """touch() logs an error upon receiving an unexpected reply."""
        mock_readline.return_value = 'SET'  # the unexpected reply
        with captured_stderr() as output:
            self.mc.touch('key')
        self.assertEqual(
            output.getvalue(),
            "MemCached: touch expected %s, got: 'SET'\n" % b'TOUCHED')
class PrintFavicon(BaseHandler):
    """CherryPy handler that locates, caches and serves site favicons.

    Icons are cached in memcache under the key ``icon-<domain>`` (the string
    ``'DEFAULT'`` marks a domain known to have no usable favicon).  Hit /
    request / default counters are kept under ``counter-<name>``.
    """

    def __init__(self):
        super(PrintFavicon, self).__init__()
        # Pre-load the fallback icon served when no favicon can be found.
        default_icon_data = self.open(DEFAULT_FAVICON_LOC, time()).read()
        self.default_icon = Icon(data=default_icon_data,
                                 location=DEFAULT_FAVICON_LOC,
                                 type=DEFAULT_FAVICON_TYPE)
        self.env = Environment(loader=FileSystemLoader(
            os.path.join(cherrypy.config['favicon.root'], 'templates')))
        self.mc = Client(
            ['%(memcache.host)s:%(memcache.port)d' % cherrypy.config],
            debug=2)
        # Initialize counters (add() is a no-op if the key already exists).
        for counter in ['requests', 'hits', 'defaults']:
            self.mc.add('counter-%s' % counter, '0')

    def open(self, url, start, headers=None):
        """Open *url* within the remaining time budget counted from *start*.

        Raises TimeoutError once the overall TIMEOUT has been exhausted.
        """
        time_spent = int(time() - start)
        if time_spent >= TIMEOUT:
            raise TimeoutError(time_spent)
        if not headers:
            headers = dict()
        # Some sites refuse requests without a browser-like User-Agent.
        headers.update({
            'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; '
                          'rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13'
        })
        opener = build_opener(HTTPRedirectHandler(), HTTPCookieProcessor())
        return opener.open(Request(url, headers=headers),
                           timeout=min(CONNECTION_TIMEOUT,
                                       TIMEOUT - time_spent))

    def validateIconResponse(self, iconResponse):
        """Turn an HTTP response into an Icon, or None if it is unusable."""
        if iconResponse.getcode() != 200:
            cherrypy.log('Non-success response:%d fetching url:%s' %
                         (iconResponse.getcode(), iconResponse.geturl()),
                         severity=INFO)
            return None
        iconContentType = iconResponse.info().gettype()
        if iconContentType in ICON_MIMETYPE_BLACKLIST:
            cherrypy.log('Url:%s favicon content-Type:%s blacklisted' %
                         (iconResponse.geturl(), iconContentType),
                         severity=INFO)
            return None
        icon = iconResponse.read()
        iconLength = len(icon)
        if iconLength == 0:
            cherrypy.log('Url:%s null content length' % iconResponse.geturl(),
                         severity=INFO)
            return None
        if iconLength < MIN_ICON_LENGTH or iconLength > MAX_ICON_LENGTH:
            # Issue warning, but accept nonetheless!
            cherrypy.log('Warning: url:%s favicon size:%d out of bounds' %
                         (iconResponse.geturl(), iconLength), severity=INFO)
        return Icon(data=icon, type=iconContentType)

    # Icon at [domain]/favicon.ico?
    def iconAtRoot(self, targetDomain, start):
        """Try the conventional /favicon.ico location; None on failure."""
        cherrypy.log('Attempting to locate favicon for domain:%s at root' %
                     targetDomain, severity=INFO)
        rootIconPath = targetDomain + '/favicon.ico'
        try:
            rootDomainFaviconResult = self.open(rootIconPath, start)
            rootIcon = self.validateIconResponse(rootDomainFaviconResult)
            if rootIcon:
                cherrypy.log('Found favicon for domain:%s at root' %
                             targetDomain, severity=INFO)
                self.cacheIcon(targetDomain, rootIcon.data, rootIconPath)
                rootIcon.location = rootIconPath
                return rootIcon
        # Was a bare `except:`; Exception still covers every fetch error
        # but no longer swallows SystemExit/KeyboardInterrupt.
        except Exception:
            cherrypy.log(
                'Error fetching favicon at domain root:%s, err:%s, msg:%s' %
                (targetDomain, sys.exc_info()[0], sys.exc_info()[1]),
                severity=INFO)

    # Icon specified in page?
    def iconInPage(self, targetDomain, targetPath, start, refresh=True):
        """Look for a <link rel=icon> in the page; follow one meta-refresh."""
        cherrypy.log('Attempting to locate embedded favicon link in page:%s' %
                     targetPath, severity=INFO)
        try:
            rootDomainPageResult = self.open(targetPath, start)
            if rootDomainPageResult.getcode() == 200:
                pageSoup = BeautifulSoup(rootDomainPageResult.read())
                pageSoupIcon = pageSoup.find(
                    'link',
                    rel=compile('^(shortcut|icon|shortcut icon)$', IGNORECASE))
                if pageSoupIcon:
                    pageIconHref = pageSoupIcon.get('href')
                    if pageIconHref:
                        pageIconPath = urljoin(targetPath, pageIconHref)
                        cherrypy.log(
                            'Found embedded favicon link:%s for domain:%s' %
                            (pageIconPath, targetDomain), severity=INFO)
                        # Forward the page's cookies: some hosts gate the
                        # icon behind the same session.
                        cookies = rootDomainPageResult.headers.getheaders(
                            "Set-Cookie")
                        headers = None
                        if cookies:
                            headers = {'Cookie': ';'.join(cookies)}
                        pagePathFaviconResult = self.open(pageIconPath, start,
                                                          headers=headers)
                        pageIcon = self.validateIconResponse(
                            pagePathFaviconResult)
                        if pageIcon:
                            cherrypy.log(
                                'Found favicon at:%s for domain:%s' %
                                (pageIconPath, targetDomain), severity=INFO)
                            self.cacheIcon(targetDomain, pageIcon.data,
                                           pageIconPath)
                            pageIcon.location = pageIconPath
                            return pageIcon
                else:
                    if refresh:
                        # No <link>: follow at most one meta-refresh
                        # (refresh=False on the recursive call stops loops).
                        for meta in pageSoup.findAll('meta'):
                            if meta.get('http-equiv', '').lower() == 'refresh':
                                match = search('url=([^;]+)',
                                               meta.get('content', ''),
                                               flags=IGNORECASE)
                                if match:
                                    refreshPath = urljoin(
                                        rootDomainPageResult.geturl(),
                                        match.group(1))
                                    cherrypy.log(
                                        'Processing refresh directive:%s '
                                        'for domain:%s' %
                                        (refreshPath, targetDomain),
                                        severity=INFO)
                                    return self.iconInPage(targetDomain,
                                                           refreshPath,
                                                           start,
                                                           refresh=False)
                    cherrypy.log('No link tag found:%s' % targetPath,
                                 severity=INFO)
            else:
                cherrypy.log('Non-success response:%d for url:%s' %
                             (rootDomainPageResult.getcode(), targetPath),
                             severity=INFO)
        except Exception:
            cherrypy.log(
                'Error extracting favicon from page:%s, err:%s, msg:%s' %
                (targetPath, sys.exc_info()[0], sys.exc_info()[1]),
                severity=WARNING)

    def cacheIcon(self, domain, icon, loc):
        """Store *icon* bytes in memcache under 'icon-<domain>'."""
        cherrypy.log('Caching icon at location:%s for domain:%s' %
                     (loc, domain), severity=INFO)
        if not self.mc.set('icon-%s' % domain, icon, time=MC_CACHE_TIME):
            cherrypy.log('Could not cache icon for domain:%s' % domain,
                         severity=ERROR)

    def iconInCache(self, targetDomain, start):
        """Return the cached icon for the domain, or None on a cache miss."""
        icon = self.mc.get('icon-%s' % targetDomain)
        if icon:
            self.mc.incr('counter-hits')
            cherrypy.log('Cache hit:%s' % targetDomain, severity=INFO)
            cherrypy.response.headers['X-Cache'] = 'Hit'
            if icon == 'DEFAULT':
                # Sentinel: the domain is known to have no favicon.
                self.mc.incr('counter-defaults')
                cherrypy.response.headers['X-Cache'] = 'Hit'
                return self.default_icon
            else:
                return Icon(data=icon)

    def writeIcon(self, icon):
        """Emit response headers for *icon* and return its raw bytes."""
        self.writeHeaders(icon)
        return icon.data

    def writeHeaders(self, icon, fmt='%a, %d %b %Y %H:%M:%S %z'):
        # MIME Type
        cherrypy.response.headers['Content-Type'] = icon.type or 'image/x-icon'
        # Set caching headers (30 days)
        cherrypy.response.headers['Cache-Control'] = 'public, max-age=2592000'
        cherrypy.response.headers['Expires'] = \
            (datetime.now() + timedelta(days=30)).strftime(fmt)

    def parse(self, url):
        """Decode *url* and return (full page path, scheme://netloc domain).

        Raises cherrypy.HTTPError(400) for malformed URLs.
        """
        # Get page path
        targetPath = self.urldecode(url)
        if not targetPath.startswith('http'):
            targetPath = 'http://%s' % targetPath
        cherrypy.log('Decoded URL:%s' % targetPath, severity=INFO)
        # Split path to get domain
        targetURL = urlparse(targetPath)
        if not targetURL or not targetURL.scheme or not targetURL.netloc:
            raise cherrypy.HTTPError(400, 'Malformed URL:%s' % url)
        targetDomain = '%s://%s' % (targetURL.scheme, targetURL.netloc)
        cherrypy.log('URL:%s, domain:%s' % (targetPath, targetDomain),
                     severity=INFO)
        return (targetPath, targetDomain)

    @cherrypy.expose
    def index(self):
        """JSON status page with the service counters."""
        status = {'status': 'ok', 'counters': dict()}
        for counter in ['requests', 'hits', 'defaults']:
            status['counters'][counter] = self.mc.get('counter-%s' % counter)
        return json.dumps(status)

    @cherrypy.expose
    def test(self):
        """Render the test page listing the configured top sites."""
        topSites = open(
            os.path.join(cherrypy.config['favicon.root'], 'topsites.txt'),
            'r').read().split()
        template = self.env.get_template('test.html')
        return template.render(topSites=topSites)

    @cherrypy.expose
    def clear(self, url):
        """Invalidate the cached icon for *url*'s domain.

        BUG FIX: icons are stored under 'icon-<domain>' (see cacheIcon), but
        this handler previously deleted only 'icon_loc-<domain>', so
        invalidation never evicted the icon.  Delete the real key, and keep
        deleting the legacy key as well for safety.
        """
        cherrypy.log('Incoming cache invalidation request:%s' % url,
                     severity=INFO)
        targetPath, targetDomain = self.parse(str(url))
        self.mc.delete('icon-%s' % targetDomain)
        self.mc.delete('icon_loc-%s' % targetDomain)  # legacy/stale key
        cherrypy.log('Evicted cache entry for %s' % targetDomain,
                     severity=INFO)

    @cherrypy.expose
    def s(self, url, skipCache='false'):
        """Main entry point: serve the favicon for *url*.

        Resolution order: cache (unless skipCache) -> page <link> -> root
        /favicon.ico -> built-in default.
        """
        start = time()
        if skipCache.lower() == 'true':
            skipCache = True
        else:
            skipCache = False
        cherrypy.log('Incoming request:%s (skipCache=%s)' % (url, skipCache),
                     severity=INFO)
        self.mc.incr('counter-requests')
        targetPath, targetDomain = self.parse(str(url))
        icon = (not skipCache and self.iconInCache(targetDomain, start)) or \
            self.iconInPage(targetDomain, targetPath, start) or \
            self.iconAtRoot(targetDomain, start)
        if not icon:
            cherrypy.log('Falling back to default icon for:%s' % targetDomain,
                         severity=INFO)
            # Cache the sentinel so we skip the lookups next time.
            self.cacheIcon(targetDomain, 'DEFAULT', 'DEFAULT_LOC')
            self.mc.incr('counter-defaults')
            icon = self.default_icon
        cherrypy.log('Time taken to process domain:%s %f' %
                     (targetDomain, time() - start), severity=INFO)
        return self.writeIcon(icon)
from os.path import exists
from memcache import Client
from sekrit import MEMCACHE_SOCKET

# Prefer the local unix domain socket when present, otherwise fall back to TCP.
if exists(MEMCACHE_SOCKET):
    CACHE = Client(['unix:' + MEMCACHE_SOCKET], debug=True)
else:
    CACHE = Client(['127.0.0.1:11213'], debug=True)


def S(key, value):
    """Store *value* under str(*key*)."""
    return CACHE.set(str(key), value)


def G(key):
    """Return the value stored under str(*key*), or None on a miss."""
    return CACHE.get(str(key))


def store_dec(s):
    """Decorator: after the wrapped store call, mirror the pair into memcache."""
    def inner(tag, url):
        s(tag, url)
        S(tag, url)
    return inner


def retrieve_dec(r):
    """Decorator: serve lookups from memcache; on a miss, fall back to the
    wrapped retriever and cache its result."""
    def inner(tag):
        cached = G(tag)
        if cached is not None:
            return cached
        cached = r(tag)
        S(tag, cached)
        return cached
    return inner
from memcache import Client
import pandas as pd

memcached_port = '9150'
hostname = 'ec2-54-183-15-168.us-west-1.compute.amazonaws.com'

# memcached client used as a simple per-row key/value store
mc = Client([hostname + ':' + memcached_port], debug=0)

# Read the CSV into a dataframe and store every row under the key "<index>Ali".
df = pd.read_csv("SO2 Emissions Key Value Cache.csv")
for key in range(len(df)):
    mc.set(str(key) + 'Ali', df.loc[key, ])

# Read every row back and rebuild a dataframe.
# FIX: the old code called DataFrame.append() inside the loop — that method
# was removed in pandas 2.0 and was quadratic (each call copies the frame).
# Collect the rows in a list and build the frame once instead.
rows = [mc.get(str(key) + 'Ali') for key in range(len(df))]
read_df = pd.DataFrame(rows)

# Write the reconstructed dataframe out to a new CSV file.
read_df.to_csv("New_SO2 Emissions Key Value Cache.csv")
def index(request):
    """Render the mailing-list overview page.

    Reads the list-of-lists structure from memcache (key ``sympa_lists``,
    presumably populated by an external Sympa sync job — TODO confirm); if
    memcache has nothing, falls back to a hard-coded snapshot so the page
    still renders.
    """
    mc = Client(["127.0.0.1:11211"])  # morgan.online.ntnu.no
    lists = mc.get("sympa_lists")
    # If we couldn't find any information in memcache right now
    if lists is None:
        # Static fallback snapshot.  Each list has a 'name' and a 'members'
        # sequence; each member carries 'subscriber' ('' or 'mail'), 'name'
        # and 'email'.  NOTE(review): the addresses are masked in this copy —
        # presumably redacted before check-in; verify against the data source.
        lists = [{
            'name': 'linjeforeninger',
            'members': [
                {'subscriber': '', 'name': 'Alf', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Smørekoppen', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Abakus', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Omega', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Bergstuderendes Forening', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Delta', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Aarhønen', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Hybrida', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Janus', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Online', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Placebo', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Spanskrøret', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'HC', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Mannhullet', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Volvox', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Socius Extremus', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Nabla', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Leonardo', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Erudio', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Paideia', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'CAF', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'ELF', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'PSI', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Katharsis', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Geolf', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Ludimus', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'De Passe Simple', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Eureka', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Sturm Und Drang', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Theodor', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Kwakiutl', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Primetime', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Akwaaba', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Jump Cut', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Panoptikon', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Kultura', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Gengangere', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Pareto', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Dionysos', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'De Folkevalgte', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Apeiron', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Psykolosjen', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Det Historiske Selskab', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Timini', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'LiMP', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Ivrig', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Emil', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Solan', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Teaterlosjen', 'email': '*****@*****.**'}]
        }, {
            'name': 'dragvoll',
            'members': [
                {'subscriber': 'mail', 'name': 'Karrieredagen Dragvoll', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Det Historiske Selskab', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'CAF', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'ELF', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'PSI', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Katharsis', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Geolf', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'De Passe Simple', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Eureka', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Sturm Und Drang', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Theodor', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Kwakiutl', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Primetime', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Akwaaba', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Jump Cut', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Panoptikon', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Kultura', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Teaterlosjen', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Pareto', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Dionysos', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'De Folkevalgte', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Apeiron', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Psykolosjen', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Paideia', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Socius Extremus', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Erudio', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Studentrådet SVT', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'LiMP', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Ivrig', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Studentrådet HF', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Ludimus', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Gengangere', 'email': '*****@*****.**'}]
        }, {
            'name': 'gloshaugen',
            'members': [
                {'subscriber': 'mail', 'name': 'Hybrida', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Volvox', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Delta', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Alf', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Mannhullet', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Spanskrøret', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Aarhønen', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Timini', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Smørekoppen', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Online', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Nabla', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'HC', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Janus', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Bergstuderendes Forening', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Emil', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Leonardo', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Omega', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Placebo', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Solan', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Abakus', 'email': '*****@*****.**'}]
        }, {
            'name': 'kjellere',
            'members': [
                {'subscriber': '', 'name': 'Smørekoppen', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Bergstuderendes Forening Kjellersjef', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Hybrida', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Dragvoll Kjelleren', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'ICOT', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Emil Kjellern', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Lauget', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'De Taktlause', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'HC', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Timini', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Dykkergruppa', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Nabla', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'LaBamba', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Omega', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Mannhullet', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Janus Kjellersjef', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Realfagskjellern', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'isu-orga', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Aarhønen', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'NTNUI', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Psykolosjen', 'email': '*****@*****.**'}]
        }, {
            'name': 'foreninger',
            'members': [
                {'subscriber': '', 'name': 'NTNUI', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Radio Revolt', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Styret i Samfundet', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Student-TV', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Under Dusken', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Velferdstinget', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Linjeforeninger på NTN', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Masterforeninger på Gløshaugen', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Studenttinget', 'email': '*****@*****.**'}]
        }, {
            'name': 'masterforeninger',
            'members': [
                {'subscriber': 'mail', 'name': 'Soma', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Projeksjon', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Symbiosis', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Signifikant', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Solan', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Hippodamus', 'email': '*****@*****.**'}]
        }, {
            'name': 'sr-samarbeid',
            'members': [
                {'subscriber': '', 'name': 'Studentrådet IME', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Spanskrøret', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Delta', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Online', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Abakus', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Omega', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Nabla', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Emil', 'email': '*****@*****.**'}]
        }, {
            'name': 'ivt-samarbeid',
            'members': [
                {'subscriber': 'mail', 'name': 'Mannhullet', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Aarhønen', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Studentrådet IVT', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Hybrida', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Smørekoppen', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'BSF', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Teknologiporten', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Leonardo', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Emil', 'email': '*****@*****.**'}]
        }, {
            'name': 'linjeledere',
            'members': [
                {'subscriber': 'mail', 'name': 'Timini', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Nabla', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Placebo', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Omega', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Smørekoppen', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Hybrida', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Janus', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Leonardo', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Solan', 'email': '*****@*****.**'},
                {'subscriber': '', 'name': 'Tidligere Linjeledere', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Spanskrøret', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Mannhullet', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Abakus', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Emil', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Online', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Volvox', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Aarhønen', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Berg', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Delta', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'Alf', 'email': '*****@*****.**'},
                {'subscriber': 'mail', 'name': 'HC', 'email': '*****@*****.**'}]
        }]
    return render(request, 'mailinglists/index.html', {'lists': lists})
class StorageServerApp(SyncServerApp):
    """Storage application"""

    def __init__(self, urls, controllers, config=None,
                 auth_class=Authentication):
        """Build per-host storage connectors and the optional blacklist cache.

        config: mapping of settings; keys of the form
        "host:<hostname>.<section>.<setting>" override settings per host.
        """
        super(StorageServerApp, self).__init__(urls, controllers, config,
                                               auth_class)
        self.config = config
        # Collecting the host-specific config and building connectors.
        self.storages = {'default': get_storage(config)}
        hostnames = set()
        host_token = 'host:'
        for cfgkey in config:
            if cfgkey.startswith(host_token):
                # Get the hostname from the config key. This assumes
                # that host-specific keys have two trailing components
                # that specify the setting to override.
                # E.g: "host:localhost.storage.sqluri" => "localhost"
                hostname = cfgkey[len(host_token):].rsplit(".", 2)[0]
                hostnames.add(hostname)
        for hostname in hostnames:
            host_cfg = self._host_specific(hostname, config)
            self.storages[hostname] = get_storage(host_cfg)
        self.check_blacklist = \
            self.config.get('storage.check_blacklisted_nodes', False)
        # Blacklist checking needs a memcached client; Client may be None
        # when the memcache module is not installed (guarded import upstream
        # — TODO confirm at the top of this file).
        if self.check_blacklist and Client is not None:
            servers = self.config.get('storage.cache_servers',
                                      '127.0.0.1:11211')
            self.cache = Client(servers.split(','))
        else:
            if self.check_blacklist:
                raise ValueError('The "check_blacklisted_node" option '
                                 'needs a memcached server')
            self.cache = None

    def get_storage(self, request):
        """Return the storage connector for the request's host
        (falling back to 'default' for unknown hosts)."""
        host = request.host
        if host not in self.storages:
            host = 'default'
        return self.storages[host]

    def _before_call(self, request):
        """Pre-request hook: honour 'down'/'backoff' markers in memcache.

        Returns a dict of extra response headers (possibly empty); raises
        HTTPServiceUnavailable when the host is marked down.
        """
        # let's control if this server is not on the blacklist
        if not self.check_blacklist:
            return {}
        host = request.host
        if self.cache.get('down:%s' % host) is not None:
            # the server is marked as down -- let's exit
            raise HTTPServiceUnavailable("Server Problem Detected")
        backoff = self.cache.get('backoff:%s' % host)
        if backoff is not None:
            # the server is marked to back-off requests. We will treat those
            # but add the header
            return {'X-Weave-Backoff': str(backoff)}
        return {}

    def _debug_server(self, request):
        """Return human-readable lines describing the backend for *request*."""
        res = []
        storage = self.get_storage(request)
        res.append('- backend: %s' % storage.get_name())
        if storage.get_name() in ('memcached',):
            cache_servers = ['%s:%d' % (server.ip, server.port)
                             for server in storage.cache.servers]
            res.append('- memcached servers: %s</li>' %
                       ', '.join(cache_servers))
        if storage.get_name() in ('sql', 'memcached'):
            res.append('- sqluri: %s' % storage.sqluri)
        return res
from werkzeug.contrib.cache import MemcachedCache
from memcache import Client
import sys

# Python 2 smoke-test script for a memcached instance on port 12000:
# run with "1" to write a value, anything else to read it back.
cache = Client(["127.0.0.1:12000"], debug=True)
print sys.argv
#cache.set("foo", "sss")
if sys.argv[1] == "1":
    # Positional args to set(): presumably time=4 (expiry seconds) and
    # min_compress_len=1 — TODO confirm against python-memcached's signature.
    cache.set("foo", "asdsadasdsadas", 4, 1)
else:
    # Prints None once the entry above has expired.
    print cache.get("foo")
#
class TestMemcache(TestCase):
    """Integration tests for the memcache Client against a live memcached
    on 127.0.0.1:11211 (the suite requires a running server)."""

    def setUp(self):
        # TODO: unix socket server stuff
        servers = ["127.0.0.1:11211"]
        self.mc = Client(servers, debug=1)
        pass  # NOTE(review): stray 'pass' — harmless leftover

    def check_setget(self, key, val, noreply=False):
        # Round-trip helper: store a value and assert it reads back equal.
        self.mc.set(key, val, noreply=noreply)
        newval = self.mc.get(key)
        self.assertEqual(newval, val)

    def test_setget(self):
        self.check_setget("a_string", "some random string")
        self.check_setget("a_string_2", "some random string", noreply=True)
        self.check_setget("an_integer", 42)
        self.check_setget("an_integer_2", 42, noreply=True)

    def test_delete(self):
        self.check_setget("long", int(1 << 30))
        result = self.mc.delete("long")
        self.assertEqual(result, True)
        self.assertEqual(self.mc.get("long"), None)

    def test_get_multi(self):
        self.check_setget("gm_a_string", "some random string")
        self.check_setget("gm_an_integer", 42)
        self.assertEqual(
            self.mc.get_multi(["gm_a_string", "gm_an_integer"]),
            {"gm_an_integer": 42, "gm_a_string": "some random string"})

    def test_get_unknown_value(self):
        self.assertEqual(self.mc.get("unknown_value"), None)

    def test_setget_foostruct(self):
        # Pickled custom objects must round-trip as well.
        f = FooStruct()
        self.check_setget("foostruct", f)
        self.check_setget("foostruct_2", f, noreply=True)

    def test_incr(self):
        self.check_setget("i_an_integer", 42)
        self.assertEqual(self.mc.incr("i_an_integer", 1), 43)

    def test_incr_noreply(self):
        # noreply incr returns None but still applies server-side.
        self.check_setget("i_an_integer_2", 42)
        self.assertEqual(self.mc.incr("i_an_integer_2", 1, noreply=True), None)
        self.assertEqual(self.mc.get("i_an_integer_2"), 43)

    def test_decr(self):
        self.check_setget("i_an_integer", 42)
        self.assertEqual(self.mc.decr("i_an_integer", 1), 41)

    def test_decr_noreply(self):
        self.check_setget("i_an_integer_2", 42)
        self.assertEqual(self.mc.decr("i_an_integer_2", 1, noreply=True), None)
        self.assertEqual(self.mc.get("i_an_integer_2"), 41)

    def test_sending_spaces(self):
        # Keys containing spaces must be rejected client-side.
        try:
            self.mc.set("this has spaces", 1)
        except Client.MemcachedKeyCharacterError as err:
            self.assertTrue("characters not allowed" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyCharacterError, nothing raised")

    def test_sending_control_characters(self):
        try:
            self.mc.set("this\x10has\x11control characters\x02", 1)
        except Client.MemcachedKeyCharacterError as err:
            self.assertTrue("characters not allowed" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyCharacterError, nothing raised")

    def test_sending_key_too_long(self):
        try:
            self.mc.set('a' * SERVER_MAX_KEY_LENGTH + 'a', 1)
        except Client.MemcachedKeyLengthError as err:
            self.assertTrue("length is >" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyLengthError, nothing raised")
        # These should work.
        self.mc.set('a' * SERVER_MAX_KEY_LENGTH, 1)
        self.mc.set('a' * SERVER_MAX_KEY_LENGTH, 1, noreply=True)
def get(self, key):
    """Fetch the value stored under this instance's namespaced key."""
    # NOTE(review): 'uniqeKey' looks like a typo for 'uniqueKey', but the
    # attribute is defined elsewhere, so it must keep its spelling here.
    namespaced = self.uniqeKey + str(key)
    return Client.get(self, namespaced)
class MemcacheFeatureStorage(FeatureStorage):
    """Feature storage backed by memcache.

    A bucket is represented by a sentinel key ``<PREFIX>.<name>`` whose value
    is its creation timestamp; the timestamp doubles as the bucket's version
    token passed to MemcacheFeatureBucket.
    """

    PREFIX = 'georest_buckets'
    support_version = False

    def __init__(self, hosts):
        """
        Feature storage implemented in Memcache

        :param list hosts: list of hosts
            1. Strings of the form C{"host:port"}
            2. Tuples of the form C{("host:port", weight)}
        :rtype :class:`MemcacheFeatureStorage`
        """
        self._client = Client(servers=hosts)

    def create_bucket(self, name, overwrite=False, **kwargs):
        """Create a bucket; with overwrite=True replace an existing one.

        Raises DuplicatedBucket when it exists and overwrite is False, and
        StorageInternalError on any memcache failure.
        """
        bucket_name = self._make_bucket_name(name)
        timestamp = time.time()
        try:
            # add() fails (returns falsy) when the key already exists.
            add_ok = self._client.add(key=bucket_name, val=timestamp)
        except Exception as e:
            raise StorageInternalError(message='add error', e=e)
        if not add_ok:
            if overwrite:
                try:
                    rep_ok = self._client.replace(
                        key=bucket_name, val=timestamp)
                except Exception as e:
                    # CONSISTENCY FIX: pass the message as a keyword, matching
                    # every other StorageInternalError raise in this class
                    # (previously passed positionally here only).
                    raise StorageInternalError(message='replace error', e=e)
                if not rep_ok:
                    raise StorageInternalError(message='failed to replace')
            else:
                raise DuplicatedBucket(name)
        return MemcacheFeatureBucket(name, self._client, str(timestamp))

    def get_bucket(self, name):
        """Return the bucket *name*; raises BucketNotFound if absent."""
        bucket_name = self._make_bucket_name(name)
        try:
            timestamp = self._client.get(bucket_name)
        except Exception as e:
            raise StorageInternalError(message='get error', e=e)
        if not timestamp:
            raise BucketNotFound(name)
        return MemcacheFeatureBucket(name, self._client, str(timestamp))

    def delete_bucket(self, name):
        """Delete the bucket sentinel; raises BucketNotFound if absent."""
        bucket_name = self._make_bucket_name(name)
        try:
            delete_ok = self._client.delete(bucket_name)
        except Exception as e:
            raise StorageInternalError(message='delete error', e=e)
        if not delete_ok:
            raise BucketNotFound(name)
        return True

    def has_bucket(self, name):
        """Return True if the bucket sentinel key exists."""
        bucket_name = self._make_bucket_name(name)
        try:
            get_ok = self._client.get(bucket_name)
        except Exception as e:
            raise StorageInternalError(message='get error', e=e)
        return get_ok is not None

    def close(self):
        # Nothing to release: the memcache client needs no explicit shutdown.
        pass

    def _make_bucket_name(self, name):
        """Build the memcache key for a bucket (utf-8 bytes, Python 2)."""
        if isinstance(name, unicode):
            name = name.encode('utf-8')
        return '.'.join((self.PREFIX, name))
def test_insert_appsinstalled():
    """insert_appsinstalled() stores the packed blob under '<dev_type>:<dev_id>'."""
    addr = '127.0.0.1:33013'
    client = Client([addr])
    line = 'idfa 1rfw452y52g2gq4g 55.55 42.42 1423,43,567,3,7,23'
    parsed = mt.parse_appsinstalled(line)
    mt.insert_appsinstalled(addr, parsed)
    expected = '\x08\x8f\x0b\x08+\x08\xb7\x04\x08\x03\x08\x07\x08\x17\x11fffff\xc6K@\x19\xf6(\\\x8f\xc25E@'
    assert client.get('idfa:1rfw452y52g2gq4g') == expected
from datetime import datetime

# port = '11211'
#host = 'localhost'
memcached_port = '9150'
redis_port = '9250'
password = ''
hostname = 'ec2-54-183-15-168.us-west-1.compute.amazonaws.com'

# Demo of the basic memcached operations against a remote server.
mc = Client([hostname + ':' + memcached_port], debug=0)
#mc = Redis(host=hostname, port=redis_port, password=password)

start_time = datetime.now()

# set / get round-trip
mc.set("first_key", "first value")
value = mc.get("first_key")
print(value)

# set followed by delete
mc.set("second_key", 3)
mc.delete("second_key")

# incr/decr: memcached requires the stored VALUE to be a decimal string
# (or integer), hence "1" rather than 1 here.
mc.set("key", "1")
value = mc.get('key')
print(value)

mc.incr("key")
value = mc.get('key')
print(value)

mc.decr("key")
value = mc.get('key')
print(value)
class MemcachedLoaderMixin(object):
    """
    Uses a memcached server to cache the templates.

    Requires the memcache library from tummy__.

    __ http://www.tummy.com/Community/software/python-memcached/
    """

    def __init__(self, use_memcache, memcache_time=60 * 60 * 24 * 7,
                 memcache_host=None, item_prefix='template/'):
        try:
            from memcache import Client
        except ImportError:
            raise RuntimeError('the %r loader requires an installed '
                               'memcache module' % self.__class__.__name__)
        if memcache_host is None:
            memcache_host = ['127.0.0.1:11211']
        if use_memcache:
            self.__memcache = Client(list(memcache_host))
            self.__memcache_time = memcache_time
        else:
            self.__memcache = None
        self.__item_prefix = item_prefix
        self.__lock = Lock()

    def load(self, environment, name, translator):
        """
        Load and translate a template. First we check if there is a
        cached version of this template in the memory cache. If this is
        not the cache check for a compiled template in the disk cache
        folder. And if none of this is the case we translate the template,
        cache and return it.
        """
        # IDIOM FIX: the lock was managed with an explicit acquire()/
        # try/finally release() pair; `with` gives the same guarantee
        # (release on every exit path, including the early return) tersely.
        with self.__lock:
            # caching is only possible for the python translator. skip
            # all other translators
            if translator is not PythonTranslator:
                return super(MemcachedLoaderMixin, self).load(
                    environment, name, translator)
            tmpl = None
            push_to_memory = False

            # check if we have something in the memory cache and the
            # memory cache is enabled.
            if self.__memcache is not None:
                bytecode = self.__memcache.get(self.__item_prefix + name)
                if bytecode:
                    tmpl = Template.load(environment, bytecode)
                else:
                    push_to_memory = True

            # if we still have no template we load, parse and translate it.
            if tmpl is None:
                tmpl = super(MemcachedLoaderMixin, self).load(
                    environment, name, translator)

            # if memcaching is enabled and the template not loaded
            # we add that there.
            if push_to_memory:
                self.__memcache.set(self.__item_prefix + name,
                                    tmpl.dump(), self.__memcache_time)
            return tmpl
class TestMemcache(unittest.TestCase):
    """Integration tests for the python-memcached client.

    These tests require a live memcached instance listening on
    127.0.0.1:11211; they talk to the real server rather than mocks.
    """

    def setUp(self):
        # TODO(): unix socket server stuff
        servers = ["127.0.0.1:11211"]
        self.mc = Client(servers, debug=1)

    def tearDown(self):
        # Close pooled sockets; keys written by tests are left on the server.
        self.mc.disconnect_all()

    def check_setget(self, key, val, noreply=False):
        # Round-trip helper: store a value and assert it reads back intact.
        self.mc.set(key, val, noreply=noreply)
        newval = self.mc.get(key)
        self.assertEqual(newval, val)

    def test_setget(self):
        self.check_setget("a_string", "some random string")
        self.check_setget("a_string_2", "some random string", noreply=True)
        self.check_setget("an_integer", 42)
        self.check_setget("an_integer_2", 42, noreply=True)

    def test_delete(self):
        self.check_setget("long", int(1 << 30))
        result = self.mc.delete("long")
        self.assertEqual(result, True)
        self.assertEqual(self.mc.get("long"), None)

    def test_get_multi(self):
        self.check_setget("gm_a_string", "some random string")
        self.check_setget("gm_an_integer", 42)
        self.assertEqual(self.mc.get_multi(["gm_a_string", "gm_an_integer"]), {
            "gm_an_integer": 42,
            "gm_a_string": "some random string"
        })

    def test_get_unknown_value(self):
        self.mc.delete("unknown_value")
        self.assertEqual(self.mc.get("unknown_value"), None)

    def test_setget_foostruct(self):
        # Custom objects round-trip via serialization.
        f = FooStruct()
        self.check_setget("foostruct", f)
        self.check_setget("foostruct_2", f, noreply=True)

    def test_incr(self):
        self.check_setget("i_an_integer", 42)
        self.assertEqual(self.mc.incr("i_an_integer", 1), 43)

    def test_incr_noreply(self):
        self.check_setget("i_an_integer_2", 42)
        # noreply mode always returns None; verify via a follow-up get.
        self.assertEqual(self.mc.incr("i_an_integer_2", 1, noreply=True), None)
        self.assertEqual(self.mc.get("i_an_integer_2"), 43)

    def test_decr(self):
        self.check_setget("i_an_integer", 42)
        self.assertEqual(self.mc.decr("i_an_integer", 1), 41)

    def test_decr_noreply(self):
        self.check_setget("i_an_integer_2", 42)
        self.assertEqual(self.mc.decr("i_an_integer_2", 1, noreply=True), None)
        self.assertEqual(self.mc.get("i_an_integer_2"), 41)

    def test_sending_spaces(self):
        # Keys containing spaces must be rejected client-side.
        try:
            self.mc.set("this has spaces", 1)
        except Client.MemcachedKeyCharacterError as err:
            self.assertTrue("characters not allowed" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyCharacterError, nothing raised")

    def test_sending_control_characters(self):
        # Keys containing control characters must be rejected client-side.
        try:
            self.mc.set("this\x10has\x11control characters\x02", 1)
        except Client.MemcachedKeyCharacterError as err:
            self.assertTrue("characters not allowed" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyCharacterError, nothing raised")

    def test_sending_key_too_long(self):
        try:
            self.mc.set('a' * SERVER_MAX_KEY_LENGTH + 'a', 1)
        except Client.MemcachedKeyLengthError as err:
            self.assertTrue("length is >" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyLengthError, nothing raised")

        # These should work.
        self.mc.set('a' * SERVER_MAX_KEY_LENGTH, 1)
        self.mc.set('a' * SERVER_MAX_KEY_LENGTH, 1, noreply=True)

    def test_setget_boolean(self):
        """GitHub issue #75. Set/get with boolean values."""
        self.check_setget("bool", True)

    def test_unicode_key(self):
        # Build the longest possible key out of a multi-byte character.
        s = six.u('\u4f1a')
        maxlen = SERVER_MAX_KEY_LENGTH // len(s.encode('utf-8'))
        key = s * maxlen
        self.mc.set(key, 5)
        value = self.mc.get(key)
        self.assertEqual(value, 5)

    def test_ignore_too_large_value(self):
        # NOTE: "MemCached: while expecting[...]" is normal...
        key = 'keyhere'
        value = 'a' * (SERVER_MAX_VALUE_LENGTH // 2)
        self.assertTrue(self.mc.set(key, value))
        self.assertEqual(self.mc.get(key), value)

        value = 'a' * SERVER_MAX_VALUE_LENGTH
        self.assertFalse(self.mc.set(key, value))
        # This test fails if the -I option is used on the memcached server
        self.assertTrue(self.mc.get(key) is None)

    def test_get_set_multi_key_prefix(self):
        """Testing set_multi()/get_multi() with a shared key_prefix."""
        prefix = 'pfx_'
        values = {'key1': 'a', 'key2': 'b'}
        errors = self.mc.set_multi(values, key_prefix=prefix)
        self.assertEqual(errors, [])

        keys = list(values)
        self.assertEqual(self.mc.get_multi(keys, key_prefix=prefix), values)

    def test_set_multi_dead_servers(self):
        """Testing set_multi() with no memcacheds running."""
        self.mc.disconnect_all()
        for server in self.mc.servers:
            server.mark_dead('test')
        errors = self.mc.set_multi({'key1': 'a', 'key2': 'b'})
        self.assertEqual(sorted(errors), ['key1', 'key2'])

    def test_disconnect_all_delete_multi(self):
        """Testing delete_multi() with no memcacheds running."""
        self.mc.disconnect_all()
        ret = self.mc.delete_multi({'keyhere': 'a', 'keythere': 'b'})
        self.assertEqual(ret, 1)
""" This is a simple app to generate random numbers for keys stored in memcached. The keys are upper case letters starting from 'A' through to 'Z' (inclusive). The key value will only be updated if a random number is less than 10 or greater than 100. """ from memcache import Client import string, random, time servers = ["127.0.0.1:11211"] mc = Client(servers, debug=1) if __name__ == '__main__': for c in string.ascii_uppercase: mc.set(c, 0, time=0) while True: for c in string.ascii_uppercase: r = random.randint(0, 100) if r < 10 or r > 90: print('Setting value for {}'.format(c), end='') v = random.randint(0, 100) mc.set(c, v, time=0) print(' -> test: v={}'.format(mc.get(c))) print('*') time.sleep(1) # EOF
return self.state['buckets'][hash_result % MMC_CONSISTENT_BUCKETS] return self.state['points'][0]['server'] if __name__ == '__main__': def mm_find_key_host(mm_server, key): mmc = mmc_consistent() for item in mm_server: mmc.mmc_consistent_add_server(server=item, weight=1) server = mmc.mmc_consistent_find_server(key) return server '''mmc = mmc_consistent() for item in mm_server: mmc.mmc_consistent_add_server(server=item, weight=1)''' #print mmc.state['points'] #根据计算得出对应服务器获取对应key数据 验证 key = ['test1','test2','test3','test4','test5','test6','test7','test8','test9'] for item in key: server = mm_find_key_host(mm_server, item) print server mconfig = ['%s:%d' % (server['host'], server['port'])] mm = Client(mconfig) print mm.get(item) #直接使用python_memcached 获取数据验证 mm_server_list = ['192.168.1.96:11211', '192.168.201.109:11211'] mm = Client(mm_server_list) print mm.get(item)
class MemcachedCacheClient(CacheClient):
    """Memcached cache client implementation."""

    def __init__(self, config):
        # The CacheClient base takes host, port and cache name up front.
        super(MemcachedCacheClient, self).__init__(config["host"], config["port"], config["cache"])
        self.config = config
        if self.cache_name != DEFAULT_CACHE_NAME:
            print "WARNING: memcached client doesn't support named caches. cache_name config value will be ignored and default cache will be used instead."
        # NOTE(review): self.port is concatenated as a string, so the config
        # value is expected to be a str, not an int -- confirm with callers.
        self.memcached_client = Client([self.host + ':' + self.port], debug=0)
        return

    def put(self, key, value, version=None, lifespan=None, max_idle=None, put_if_absent=False):
        """Store *value* under *key*.

        lifespan is seconds-to-live (memcached caps relative expiry at 30
        days); version engages a CAS write; put_if_absent maps to add().
        Reports failures through self._error().
        """
        time = 0  # NOTE: shadows the stdlib 'time' name inside this method
        if lifespan != None:
            if lifespan > MEMCACHED_LIFESPAN_MAX_SECONDS:
                self._error("Memcached cache client supports lifespan values only up to %s seconds (30 days)." % MEMCACHED_LIFESPAN_MAX_SECONDS)
            time = lifespan
        if max_idle != None:
            self._error("Memcached cache client doesn't support max idle time setting.")
        try:
            if (version == None):
                if (put_if_absent):
                    if not self.memcached_client.add(key, value, time, 0):
                        # current python-memcached doesn't recoginze these states
                        # if self.memcached_client.last_set_status == "NOT_STORED":
                        #     raise ConflictError
                        # else:
                        #     self._error("Operation unsuccessful. " + self.memcached_client.last_set_status)
                        self._error("Operation unsuccessful. Possibly CONFLICT.")
                else:
                    if not self.memcached_client.set(key, value, time, 0):
                        # self._error("Operation unsuccessful. " + self.memcached_client.last_set_status)
                        self._error("Operation unsuccessful.")
            else:
                # CAS write: seed the client's cas id for this key with the
                # caller-supplied version, then attempt the swap.
                try:
                    self.memcached_client.cas_ids[key] = int(version)
                except ValueError:
                    self._error("Please provide an integer version.")
                if not self.memcached_client.cas(key, value, time, 0):
                    # if self.memcached_client.last_set_status == "EXISTS":
                    #     raise ConflictError
                    # if self.memcached_client.last_set_status == "NOT_FOUND":
                    #     raise NotFoundError
                    # else:
                    #     self._error("Operation unsuccessful. " + self.memcached_client.last_set_status)
                    self._error("Operation unsuccessful. Possibly CONFLICT, NOT_FOUND.")
        except CacheClientError as e:
            raise e #rethrow
        except Exception as e:
            self._error(e)

    def get(self, key, get_version=False):
        """Fetch *key*.

        With get_version=True, uses gets() and the client's cas_ids
        bookkeeping to return a (version, value) pair.  Raises
        NotFoundError when the key is absent.
        """
        try:
            if get_version:
                val = self.memcached_client.gets(key)
                if val == None:
                    raise NotFoundError
                version = self.memcached_client.cas_ids[key]
                if version == None:
                    self._error("Couldn't obtain version info from memcached server.")
                return version, val
            else:
                val = self.memcached_client.get(key)
                if val == None:
                    raise NotFoundError
                return val
        except CacheClientError as e:
            raise e #rethrow
        except Exception as e:
            self._error(e.args)

    def delete(self, key, version=None):
        """Delete *key* (unversioned deletes only).

        NOTE(review): the success branch looks inverted -- python-memcached's
        delete() returns truthy on success, yet that is the branch that raises
        NotFoundError/_error here.  Also 'last_set_status' is not an attribute
        of stock python-memcached; this code likely expects a patched client.
        Confirm against the client actually deployed before changing.
        """
        try:
            if version:
                self._error("versioned delete operation not available for memcached client")
            if self.memcached_client.delete(key, 0):
                if self.memcached_client.last_set_status == "NOT_FOUND":
                    raise NotFoundError
                else:
                    self._error("Operation unsuccessful. " + self.memcached_client.last_set_status)
        except CacheClientError as e:
            raise e #rethrow
        except Exception as e:
            self._error(e.args)

    def clear(self):
        """Flush every entry on the memcached instance (flush_all)."""
        try:
            self.memcached_client.flush_all()
        except CacheClientError as e:
            raise e #rethrow
        except Exception as e:
            self._error(e.args)
class XmlSpider(Spider):
    """Scrapy spider that crawls XML/sitemap seed URLs, skipping pages whose
    body md5 matches the one cached in a local memcached instance."""

    name = 'run_delta'
    allowed_domains = []

    def read_seeds(self, file, type):
        """Read a seeds file of "url<TAB>cate" lines.

        Returns a list of (url, cate, type) tuples, or False if the file
        could not be read at all.  Lines with a bad field count or an
        unknown category are logged and skipped.
        """
        seeds = []
        ok = 0
        error = 0
        try:
            for line in open(file, "r"):
                self.logger.info("[read_seeds] handle line:" + line)
                parts = line.split()
                if len(parts) != 2:
                    self.logger.error("[read_seeds] parts error line:%s" % line)
                    error = error + 1
                    continue
                url = parts[0].strip()
                cate = parts[1].strip()
                # Category '8' is intentionally absent from the valid set.
                if cate not in ('1', '2', '3', '4', '5', '6', '7', '9'):
                    self.logger.error("[read_seeds] cate error line:%s" % line)
                    error = error + 1
                    continue
                ok = ok + 1
                seeds.append((url, cate, type))
        except:
            return False
        self.logger.info("[read_seeds] finish, ok:%d,error:%d,file:%s" % (ok, error, file))
        return seeds

    def __init__(self, *args, **kargs):
        """Accepts -a xml=..., -a index=..., -a nofilter=true|false.

        At least one of xml/index must be given; both seed lists are merged
        into self.start_urls.  Raises ValueError on missing/broken seeds.
        """
        super(XmlSpider, self).__init__(*args, **kargs)
        xmlfile = kargs.get("xml", "")
        indexfile = kargs.get("index", "")
        self.nofilter = (kargs.get("nofilter", "") == "true")
        self.start_urls = []
        if not xmlfile and not indexfile:
            raise ValueError(
                "Seeds file error: Must specify seeds file! exam: scrapy crawl **spider -a xml=seeds.txt -a index=seeds2.txt [-a nofilter=true or false], xml and index at least has 1\n\t\tfile format:url\tcate"
            )
        if xmlfile:
            seeds = self.read_seeds(xmlfile, "xml")
            if seeds == False:
                raise ValueError("read file error:" + xmlfile)
            self.start_urls.extend(seeds)
        if indexfile:
            seeds = self.read_seeds(indexfile, "index")
            if seeds == False:
                raise ValueError("read file error:" + indexfile)
            self.start_urls.extend(seeds)
        if not self.start_urls:
            raise ValueError("Empty seeds")
        self.logger.info("nofilter:%s,start urls:%s" %
                         (self.nofilter, self.start_urls))
        # Local memcached stores per-URL body md5s for change detection.
        self.mc = Client(['127.0.0.1:11211'])

    def start_requests(self):
        # Each seed carries its category and source type through request meta.
        print(self.start_urls)
        for (url, cate, type) in self.start_urls:
            yield Request(tools.add_timestamp(url), meta={
                'cate': cate,
                'type': type
            })

    def extract_text(self, item, field_name):
        # First text node of the child element, squeezed to one line;
        # returns None implicitly when the node is absent.
        tmp = item.xpath('%(field_name)s/text()' % locals())
        if tmp:
            value = tmp[0].extract().strip()
            return oneline(value)

    def extract_join_text(self, item, field_name):
        # All non-empty text nodes joined with ';'; None when there are none.
        L = []
        for tmp in item.xpath('%(field_name)s/text()' % locals()).extract():
            tmp = tmp.strip()
            if tmp:
                L.append(tmp)
        if L:
            return oneline(u';'.join(L))

    def is_response_changed(self, response):
        """Compare the response body md5 with the cached md5 for this URL.

        NOTE(review): the new md5 is never written back (the mc.set call is
        commented out below), so a URL is only ever considered unchanged if
        something else populates the cache -- confirm this is intentional.
        """
        #if 'mgtv' in response.url:
        #    return True
        #if tools.del_timestamp(response.url) == "http://www.mgtv.com/xml/sogou/pay/dianshiju_add.xml":
        #    return True
        if self.nofilter:
            self.logger.info(
                "[is_response_changed] nofilter = True, force response changed, url:%s"
                % (response.url))
            return True
        body_md5 = str2md5(response.body)
        #self.logger.info("[is_response_changed] url:%s, body: %s, new_size: %s" % (tools.del_timestamp(response.url), response.body, len(response.body)))
        self.logger.info(
            "[is_response_changed] url:%s, new_size: %s" %
            (tools.del_timestamp(response.url), len(response.body)))
        old_body_md5 = self.mc.get(tools.del_timestamp(response.url))
        if old_body_md5 == body_md5:
            self.logger.info(
                "[is_response_changed] url:%s,same md5:%s,size:%s" %
                (tools.del_timestamp(response.url), body_md5,
                 len(
                     response.body)))
            return False
        #self.mc.set(tools.del_timestamp(response.url), body_md5)
        self.logger.info(
            "[is_response_changed] url:%s,diff md5, new:%s,old:%s, new_size:%s"
            % (tools.del_timestamp(response.url), body_md5, str(old_body_md5),
               len(response.body)))
        return True

    def parse(self, response):
        """SAX-parse the XML body with MyGeneralHandler and return its items.

        Non-utf-8 bodies are transcoded to utf-8 (with the declared encoding
        string rewritten) before parsing; 'mgtv' URLs get one re-fetch retry
        via requests on parse failure.
        """
        #print type(response.body)
        t0 = time.time()
        urltype = response.request.meta["type"]
        if urltype == 'xml':
            #need judge whether the whole file changed
            if not self.is_response_changed(response):
                dt = (time.time() - t0) * 1000
                self.logger.info('[parse] [%s] [not change] %s dt=%.3f ms' %
                                 (response.url, urltype, dt))
                return
        #bring self.nofilter to pipelines
        handler = MyGeneralHandler(response.request.meta['cate'],
                                   response.url, self.nofilter)
        try:
            if response.encoding == 'utf-8':
                xml.sax.parseString(response.body, handler)
            else:
                xml.sax.parseString(
                    response.body.decode(response.encoding,
                                         'ignore').encode("utf-8").replace(
                                             "gbk", "utf-8").replace(
                                                 "gb18030", "utf-8"), handler)
        except Exception as e:
            logging.error(
                "[PARSE EXCEPTION][sax first] url:|%s|,error:|%s|,encoding:%s,size:|||%s"
                % (response.url, e, response.encoding, len(response.body)))
            if 'mgtv' in response.url:
                try:
                    handler = MyGeneralHandler(response.request.meta['cate'],
                                               response.url, self.nofilter)
                    r = requests.get(response.url)
                    xml.sax.parseString(r.content, handler)
                except Exception as e:
                    # NOTE(review): if requests.get itself raised, 'r' is
                    # unbound here and len(r.content) raises NameError.
                    logging.error(
                        "[PARSE EXCEPTION][sax second] url:|%s|,error:|%s|,encoding:%s,content:|||%s"
                        % (response.url, e, response.encoding, len(r.content)))
        dt = (time.time() - t0) * 1000
        if handler.items and not handler.items[0]:
            # NOTE(review): format string below has three %s placeholders but
            # only two arguments -- this line raises TypeError if reached.
            self.logger.info('[parse] [%s] type:%s,find NONE,%s' %
                             (response.url, str(handler.items)))
        self.logger.info(
            '[open_delta] [parse] [%s] type:%s,item_num:%s,cost:%d' %
            (response.url, urltype, len(handler.items), dt))
        return handler.items

    def parse_old(self, response):
        """Legacy XPath-based parser: emits sitemap sub-requests plus one
        item dict per <detail> element of each <url> entry."""
        t0 = time.time()
        items = []
        # extract returns unicode
        for loc in response.xpath('//sitemap/loc/text()').extract():
            loc = loc.strip()
            new_task = Request(loc)
            #self.logger.info('#new_task: ' + repr(new_task))
            items.append(new_task)
        if not self.is_response_changed(response):
            dt = (time.time() - t0) * 1000
            self.logger.info('#parse: [cached] %s %s dt=%.3f ms' %
                             (response.url, len(items), dt))
            return items
        for item in response.xpath('/urlset/url'):
            D_url = {}
            # Simple single-text fields copied verbatim.
            names = [
                'workName', 'director', 'region', 'type', 'showTime', 'hot',
                'score', 'language', 'introduction', 'definition',
                'totalnumber', 'seasonId', 'serialName', 'otherName',
                'source_type', 'status', 'nowEpisode'
            ]
            for name in names:
                v = self.extract_text(item, name)
                if v:
                    D_url[name] = v
            # Fields whose XML element name differs from the output key.
            names_map = {
                'update_info': 'update',
                'site_updateTime': 'updateTime',
                'poster': 'imageLink',
                'setAddress': 'listLink',
                'horizontalPoster': 'imageLink2',
            }
            for name, field in names_map.items():
                v = self.extract_text(item, field)
                if v:
                    D_url[name] = v
            v = self.extract_text(item, 'introduction')
            if v:
                D_url['introduction'] = self.extract_text(item, 'introduction')
            v = self.extract_join_text(item, 'starrings/starring/name')
            if v:
                D_url['starring'] = v
            v = self.extract_join_text(item, 'starrings/starring/role')
            if v:
                D_url['role'] = v
            # One output record per episode-level <detail> element.
            for detail in item.xpath('detail'):
                D_detail = D_url.copy()
                detail_names = [
                    'seq', 'singleTitle', 'singleLink', 'mplayLink',
                    'singleIntroduction', 'swfUrl', 'resourceTime',
                    'singleThumbnails', 'captionAvailable', 'vip'
                ]
                for name in detail_names:
                    v = self.extract_text(detail, name)
                    if v:
                        D_detail[name] = v
                v = self.extract_text(detail, 'length')
                if v:
                    D_detail['timeOfSingle'] = v
                v = self.extract_text(item, 'singleIntroduction')
                if v:
                    D_detail['singleIntroduction'] = v
                # url of the doc
                D_detail['__url__'] = D_detail['singleLink']
                D_detail['__collection__'] = 'teleplay_open'
                items.append(D_detail)
        dt = (time.time() - t0) * 1000
        self.logger.info('#parse: %s %s dt=%.3f ms' %
                         (response.url, len(items), dt))
        return items
# Python 2 micro-benchmark: times 100k sequential set() and get() calls
# against a local memcached instance.
from time import time
from memcache import Client

client = Client(['127.0.0.1:11211'])

# Sanity check that the server is reachable before benchmarking.
client.set("my_key", "val " + str(999))
print client.get("my_key")

n = 100000

# Time n sequential set() calls.
start_time = time()
for i in xrange(0, n):
    client.set(str(i), "val " + str(i))
print "set time: %s ms" % ((time()-start_time)*1000)

# Time n sequential get() calls.
# NOTE(review): the extra get("555") and its print are inside the timed
# window, so "get time" slightly overstates the pure get() cost.
start_time = time()
for i in xrange(0, n):
    client.get(str(i))
print client.get("555")
print "get time: %s ms" % ((time()-start_time)*1000)

#print client.stats
class TestMemcache(unittest.TestCase):
    """Integration tests for the python-memcached client (live server on
    127.0.0.1:11211), with a few mocked protocol-level tests for touch()."""

    def setUp(self):
        # TODO(): unix socket server stuff
        servers = ["127.0.0.1:11211"]
        self.mc = Client(servers, debug=1)

    def tearDown(self):
        # Flush test keys so runs do not interfere, then close sockets.
        self.mc.flush_all()
        self.mc.disconnect_all()

    def check_setget(self, key, val, noreply=False):
        # Round-trip helper: store a value and assert it reads back intact.
        self.mc.set(key, val, noreply=noreply)
        newval = self.mc.get(key)
        self.assertEqual(newval, val)

    def test_setget(self):
        self.check_setget("a_string", "some random string")
        self.check_setget("a_string_2", "some random string", noreply=True)
        self.check_setget("an_integer", 42)
        self.check_setget("an_integer_2", 42, noreply=True)

    def test_delete(self):
        self.check_setget("long", int(1 << 30))
        result = self.mc.delete("long")
        self.assertEqual(result, True)
        self.assertEqual(self.mc.get("long"), None)

    @mock.patch.object(_Host, 'send_cmd')
    @mock.patch.object(_Host, 'readline')
    def test_touch(self, mock_readline, mock_send_cmd):
        # Protocol-level check: touch() must emit "touch <key> 0".
        with captured_stderr():
            self.mc.touch('key')
        mock_send_cmd.assert_called_with(b'touch key 0')

    def test_get_multi(self):
        self.check_setget("gm_a_string", "some random string")
        self.check_setget("gm_an_integer", 42)
        self.assertEqual(
            self.mc.get_multi(["gm_a_string", "gm_an_integer"]),
            {"gm_an_integer": 42, "gm_a_string": "some random string"})

    def test_get_unknown_value(self):
        self.mc.delete("unknown_value")
        self.assertEqual(self.mc.get("unknown_value"), None)

    def test_setget_foostruct(self):
        # Custom objects round-trip via serialization.
        f = FooStruct()
        self.check_setget("foostruct", f)
        self.check_setget("foostruct_2", f, noreply=True)

    def test_incr(self):
        self.check_setget("i_an_integer", 42)
        self.assertEqual(self.mc.incr("i_an_integer", 1), 43)

    def test_incr_noreply(self):
        self.check_setget("i_an_integer_2", 42)
        # noreply mode always returns None; verify via a follow-up get.
        self.assertEqual(self.mc.incr("i_an_integer_2", 1, noreply=True), None)
        self.assertEqual(self.mc.get("i_an_integer_2"), 43)

    def test_decr(self):
        self.check_setget("i_an_integer", 42)
        self.assertEqual(self.mc.decr("i_an_integer", 1), 41)

    def test_decr_noreply(self):
        self.check_setget("i_an_integer_2", 42)
        self.assertEqual(self.mc.decr("i_an_integer_2", 1, noreply=True), None)
        self.assertEqual(self.mc.get("i_an_integer_2"), 41)

    def test_sending_spaces(self):
        # Keys containing spaces must be rejected client-side.
        try:
            self.mc.set("this has spaces", 1)
        except Client.MemcachedKeyCharacterError as err:
            self.assertTrue("characters not allowed" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyCharacterError, nothing raised")

    def test_sending_control_characters(self):
        # Keys containing control characters must be rejected client-side.
        try:
            self.mc.set("this\x10has\x11control characters\x02", 1)
        except Client.MemcachedKeyCharacterError as err:
            self.assertTrue("characters not allowed" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyCharacterError, nothing raised")

    def test_sending_key_too_long(self):
        try:
            self.mc.set('a' * SERVER_MAX_KEY_LENGTH + 'a', 1)
        except Client.MemcachedKeyLengthError as err:
            self.assertTrue("length is >" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyLengthError, nothing raised")

        # These should work.
        self.mc.set('a' * SERVER_MAX_KEY_LENGTH, 1)
        self.mc.set('a' * SERVER_MAX_KEY_LENGTH, 1, noreply=True)

    def test_setget_boolean(self):
        """GitHub issue #75. Set/get with boolean values."""
        self.check_setget("bool", True)

    def test_unicode_key(self):
        # Build the longest possible key out of a multi-byte character.
        s = u'\u4f1a'
        maxlen = SERVER_MAX_KEY_LENGTH // len(s.encode('utf-8'))
        key = s * maxlen
        self.mc.set(key, 5)
        value = self.mc.get(key)
        self.assertEqual(value, 5)

    def test_unicode_value(self):
        key = 'key'
        value = u'Iñtërnâtiônàlizætiøn2'
        self.mc.set(key, value)
        cached_value = self.mc.get(key)
        self.assertEqual(value, cached_value)

    def test_binary_string(self):
        # Compressed (binary) payloads must survive set/add/set_multi intact.
        value = 'value_to_be_compressed'
        compressed_value = zlib.compress(value.encode())

        self.mc.set('binary1', compressed_value)
        compressed_result = self.mc.get('binary1')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, zlib.decompress(compressed_result).decode())

        self.mc.add('binary1-add', compressed_value)
        compressed_result = self.mc.get('binary1-add')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, zlib.decompress(compressed_result).decode())

        self.mc.set_multi({'binary1-set_many': compressed_value})
        compressed_result = self.mc.get('binary1-set_many')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, zlib.decompress(compressed_result).decode())

    def test_ignore_too_large_value(self):
        # NOTE: "MemCached: while expecting[...]" is normal...
        key = 'keyhere'
        value = 'a' * (SERVER_MAX_VALUE_LENGTH // 2)
        self.assertTrue(self.mc.set(key, value))
        self.assertEqual(self.mc.get(key), value)

        value = 'a' * SERVER_MAX_VALUE_LENGTH
        with captured_stderr() as log:
            self.assertIs(self.mc.set(key, value), False)
        self.assertEqual(
            log.getvalue(),
            "MemCached: while expecting 'STORED', got unexpected response "
            "'SERVER_ERROR object too large for cache'\n"
        )
        # This test fails if the -I option is used on the memcached server
        self.assertTrue(self.mc.get(key) is None)

    def test_get_set_multi_key_prefix(self):
        """Testing set_multi()/get_multi() with a shared key_prefix."""
        prefix = 'pfx_'
        values = {'key1': 'a', 'key2': 'b'}
        errors = self.mc.set_multi(values, key_prefix=prefix)
        self.assertEqual(errors, [])

        keys = list(values)
        self.assertEqual(self.mc.get_multi(keys, key_prefix=prefix), values)

    def test_set_multi_dead_servers(self):
        """Testing set_multi() with no memcacheds running."""
        self.mc.disconnect_all()
        with captured_stderr() as log:
            for server in self.mc.servers:
                server.mark_dead('test')
        self.assertIn('Marking dead.', log.getvalue())
        errors = self.mc.set_multi({'key1': 'a', 'key2': 'b'})
        self.assertEqual(sorted(errors), ['key1', 'key2'])

    def test_disconnect_all_delete_multi(self):
        """Testing delete_multi() with no memcacheds running."""
        self.mc.disconnect_all()
        with captured_stderr() as output:
            ret = self.mc.delete_multi(('keyhere', 'keythere'))
        self.assertEqual(ret, 1)
        self.assertEqual(
            output.getvalue(),
            "MemCached: while expecting 'DELETED', got unexpected response "
            "'NOT_FOUND'\n"
            "MemCached: while expecting 'DELETED', got unexpected response "
            "'NOT_FOUND'\n"
        )

    @mock.patch.object(_Host, 'send_cmd')  # Don't send any commands.
    @mock.patch.object(_Host, 'readline')
    def test_touch_unexpected_reply(self, mock_readline, mock_send_cmd):
        """touch() logs an error upon receiving an unexpected reply."""
        mock_readline.return_value = 'SET'  # the unexpected reply
        with captured_stderr() as output:
            self.mc.touch('key')
        self.assertEqual(
            output.getvalue(),
            "MemCached: touch expected %s, got: 'SET'\n" % b'TOUCHED'
        )
class TestMemcache(unittest.TestCase):
    """Integration tests for a python-memcached variant that adds tag
    support (set(..., tags=...) / delete_by_tag()).  Requires a live
    memcached instance on 127.0.0.1:11211."""

    def setUp(self):
        # TODO: unix socket server stuff
        servers = ["127.0.0.1:11211"]
        self.mc = Client(servers, debug=1)

    def tearDown(self):
        # Close pooled sockets; keys written by tests are left on the server.
        self.mc.disconnect_all()

    def check_setget(self, key, val, noreply=False):
        # Round-trip helper: store a value and assert it reads back intact.
        self.mc.set(key, val, noreply=noreply)
        newval = self.mc.get(key)
        self.assertEqual(newval, val)

    def test_setget(self):
        self.check_setget("a_string", "some random string")
        self.check_setget("a_string_2", "some random string", noreply=True)
        self.check_setget("an_integer", 42)
        self.check_setget("an_integer_2", 42, noreply=True)

    def test_delete(self):
        self.check_setget("long", int(1 << 30))
        result = self.mc.delete("long")
        self.assertEqual(result, True)
        self.assertEqual(self.mc.get("long"), None)

    def test_get_multi(self):
        self.check_setget("gm_a_string", "some random string")
        self.check_setget("gm_an_integer", 42)
        self.assertEqual(
            self.mc.get_multi(["gm_a_string", "gm_an_integer"]),
            {"gm_an_integer": 42, "gm_a_string": "some random string"})

    def test_get_unknown_value(self):
        self.mc.delete("unknown_value")
        self.assertEqual(self.mc.get("unknown_value"), None)

    def test_setget_foostruct(self):
        # Custom objects round-trip via serialization.
        f = FooStruct()
        self.check_setget("foostruct", f)
        self.check_setget("foostruct_2", f, noreply=True)

    def test_incr(self):
        self.check_setget("i_an_integer", 42)
        self.assertEqual(self.mc.incr("i_an_integer", 1), 43)

    def test_incr_noreply(self):
        self.check_setget("i_an_integer_2", 42)
        # noreply mode always returns None; verify via a follow-up get.
        self.assertEqual(self.mc.incr("i_an_integer_2", 1, noreply=True), None)
        self.assertEqual(self.mc.get("i_an_integer_2"), 43)

    def test_decr(self):
        self.check_setget("i_an_integer", 42)
        self.assertEqual(self.mc.decr("i_an_integer", 1), 41)

    def test_decr_noreply(self):
        self.check_setget("i_an_integer_2", 42)
        self.assertEqual(self.mc.decr("i_an_integer_2", 1, noreply=True), None)
        self.assertEqual(self.mc.get("i_an_integer_2"), 41)

    def test_sending_spaces(self):
        # Keys containing spaces must be rejected client-side.
        try:
            self.mc.set("this has spaces", 1)
        except Client.MemcachedKeyCharacterError as err:
            self.assertTrue("characters not allowed" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyCharacterError, nothing raised")

    def test_sending_control_characters(self):
        # Keys containing control characters must be rejected client-side.
        try:
            self.mc.set("this\x10has\x11control characters\x02", 1)
        except Client.MemcachedKeyCharacterError as err:
            self.assertTrue("characters not allowed" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyCharacterError, nothing raised")

    def test_sending_key_too_long(self):
        try:
            self.mc.set('a' * SERVER_MAX_KEY_LENGTH + 'a', 1)
        except Client.MemcachedKeyLengthError as err:
            self.assertTrue("length is >" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyLengthError, nothing raised")

        # These should work.
        self.mc.set('a' * SERVER_MAX_KEY_LENGTH, 1)
        self.mc.set('a' * SERVER_MAX_KEY_LENGTH, 1, noreply=True)

    def test_setget_boolean(self):
        """GitHub issue #75. Set/get with boolean values."""
        self.check_setget("bool", True)

    def test_unicode_key(self):
        # Build the longest possible key out of a multi-byte character.
        s = six.u('\u4f1a')
        maxlen = SERVER_MAX_KEY_LENGTH // len(s.encode('utf-8'))
        key = s * maxlen
        self.mc.set(key, 5)
        value = self.mc.get(key)
        self.assertEqual(value, 5)

    def test_ignore_too_large_value(self):
        # NOTE: "MemCached: while expecting[...]" is normal...
        key = 'keyhere'
        value = 'a' * (SERVER_MAX_VALUE_LENGTH // 2)
        self.assertTrue(self.mc.set(key, value))
        self.assertEqual(self.mc.get(key), value)

        value = 'a' * SERVER_MAX_VALUE_LENGTH
        self.assertFalse(self.mc.set(key, value))
        # This test fails if the -I option is used on the memcached server
        self.assertTrue(self.mc.get(key) is None)

    def test_get_set_multi_key_prefix(self):
        """Testing set_multi()/get_multi() with a shared key_prefix."""
        prefix = 'pfx_'
        values = {'key1': 'a', 'key2': 'b'}
        errors = self.mc.set_multi(values, key_prefix=prefix)
        self.assertEqual(errors, [])

        keys = list(values)
        self.assertEqual(self.mc.get_multi(keys, key_prefix=prefix), values)

    def test_set_multi_dead_servers(self):
        """Testing set_multi() with no memcacheds running."""
        self.mc.disconnect_all()
        for server in self.mc.servers:
            server.mark_dead('test')
        errors = self.mc.set_multi({'key1': 'a', 'key2': 'b'})
        self.assertEqual(sorted(errors), ['key1', 'key2'])

    def test_disconnect_all_delete_multi(self):
        """Testing delete_multi() with no memcacheds running."""
        self.mc.disconnect_all()
        ret = self.mc.delete_multi({'keyhere': 'a', 'keythere': 'b'})
        self.assertEqual(ret, 1)

    def test_tags_set(self):
        # delete_by_tag must remove only keys tagged 't1'.
        self.mc.disconnect_all()
        self.mc.set('key', 'val', tags=['t1'])
        self.mc.set('key1', 'val2', tags=['t2'])
        self.mc.delete_by_tag('t1')
        self.assertIsNone(self.mc.get('key'))
        self.assertEqual(self.mc.get('key1'), 'val2')

    def test_tags_set_multi(self):
        # NOTE(review): the final assertion checks 'k1', a key that is never
        # set (the mapping uses 'k2'), so it passes vacuously -- it most
        # likely was meant to check 'k2'.  Confirm before changing.
        self.mc.disconnect_all()
        mapping = {
            'k': 1,
            'k2': 2,
        }
        self.mc.set_multi(mapping, tags=['t1'])
        self.mc.delete_by_tag('t1')
        self.assertIsNone(self.mc.get('k'))
        self.assertIsNone(self.mc.get('k1'))
class PrintFavicon(BaseHandler):
    """CherryPy handler that resolves and serves a site's favicon.

    Lookup order: memcached cache -> <link rel=icon> in the page ->
    /favicon.ico at the domain root -> bundled default icon.  Found icons
    are cached in memcached; request/hit/default counters are kept there
    as well.  Python 2 code (print statements elsewhere in the file,
    urllib2-style openers here).
    """

    def __init__(self):
        super(PrintFavicon, self).__init__()

        # Pre-load the fallback icon served when resolution fails.
        default_icon_data = self.open(DEFAULT_FAVICON_LOC, time()).read()

        self.default_icon = Icon(data=default_icon_data, location=DEFAULT_FAVICON_LOC, type=DEFAULT_FAVICON_TYPE)

        self.env = Environment(loader=FileSystemLoader(os.path.join(cherrypy.config["favicon.root"], "templates")))

        self.mc = Client(["%(memcache.host)s:%(memcache.port)d" % cherrypy.config], debug=2)

        # Initialize counters
        for counter in ["requests", "hits", "defaults"]:
            self.mc.add("counter-%s" % counter, "0")

    def open(self, url, start, headers=None):
        """Fetch *url* with a browser User-Agent, enforcing the overall
        request deadline (*start* is the request start timestamp).
        Raises TimeoutError once TIMEOUT seconds have elapsed."""
        time_spent = int(time() - start)
        if time_spent >= TIMEOUT:
            raise TimeoutError(time_spent)

        if not headers:
            headers = dict()

        headers.update(
            {
                "User-Agent": "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; " + "rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13"
            }
        )

        opener = build_opener(HTTPRedirectHandler(), HTTPCookieProcessor())
        return opener.open(Request(url, headers=headers), timeout=min(CONNECTION_TIMEOUT, TIMEOUT - time_spent))

    def validateIconResponse(self, iconResponse):
        """Turn an HTTP response into an Icon, or None if the response is a
        non-200, a blacklisted content type, or an empty body."""
        if iconResponse.getcode() != 200:
            cherrypy.log(
                "Non-success response:%d fetching url:%s" % (iconResponse.getcode(), iconResponse.geturl()),
                severity=INFO,
            )
            return None

        iconContentType = iconResponse.info().gettype()
        if iconContentType in ICON_MIMETYPE_BLACKLIST:
            cherrypy.log(
                "Url:%s favicon content-Type:%s blacklisted" % (iconResponse.geturl(), iconContentType), severity=INFO
            )
            return None

        icon = iconResponse.read()
        iconLength = len(icon)

        if iconLength == 0:
            cherrypy.log("Url:%s null content length" % iconResponse.geturl(), severity=INFO)
            return None

        if iconLength < MIN_ICON_LENGTH or iconLength > MAX_ICON_LENGTH:
            # Issue warning, but accept nonetheless!
            cherrypy.log(
                "Warning: url:%s favicon size:%d out of bounds" % (iconResponse.geturl(), iconLength), severity=INFO
            )

        return Icon(data=icon, type=iconContentType)

    # Icon at [domain]/favicon.ico?
    def iconAtRoot(self, targetDomain, start):
        """Try <domain>/favicon.ico; cache and return the Icon on success,
        None on any failure."""
        cherrypy.log("Attempting to locate favicon for domain:%s at root" % targetDomain, severity=INFO)
        rootIconPath = targetDomain + "/favicon.ico"
        try:
            rootDomainFaviconResult = self.open(rootIconPath, start)
            rootIcon = self.validateIconResponse(rootDomainFaviconResult)
            if rootIcon:
                cherrypy.log("Found favicon for domain:%s at root" % targetDomain, severity=INFO)
                self.cacheIcon(targetDomain, rootIcon.data, rootIconPath)
                rootIcon.location = rootIconPath
                return rootIcon
        except:
            cherrypy.log(
                "Error fetching favicon at domain root:%s, err:%s, msg:%s"
                % (targetDomain, sys.exc_info()[0], sys.exc_info()[1]),
                severity=INFO,
            )

    # Icon specified in page?
    def iconInPage(self, targetDomain, targetPath, start, refresh=True):
        """Scrape the page at *targetPath* for a <link rel=icon>; follows a
        single <meta http-equiv=refresh> redirect when *refresh* is True.
        Caches and returns the Icon on success, None otherwise."""
        cherrypy.log("Attempting to locate embedded favicon link in page:%s" % targetPath, severity=INFO)

        try:
            rootDomainPageResult = self.open(targetPath, start)

            if rootDomainPageResult.getcode() == 200:
                pageSoup = BeautifulSoup(rootDomainPageResult.read())
                pageSoupIcon = pageSoup.find("link", rel=compile("^(shortcut|icon|shortcut icon)$", IGNORECASE))

                if pageSoupIcon:
                    pageIconHref = pageSoupIcon.get("href")
                    if pageIconHref:
                        pageIconPath = urljoin(targetPath, pageIconHref)
                        cherrypy.log(
                            "Found embedded favicon link:%s for domain:%s" % (pageIconPath, targetDomain), severity=INFO
                        )

                        # Forward the page's cookies when fetching the icon.
                        cookies = rootDomainPageResult.headers.getheaders("Set-Cookie")
                        headers = None
                        if cookies:
                            headers = {"Cookie": ";".join(cookies)}

                        pagePathFaviconResult = self.open(pageIconPath, start, headers=headers)
                        pageIcon = self.validateIconResponse(pagePathFaviconResult)
                        if pageIcon:
                            cherrypy.log(
                                "Found favicon at:%s for domain:%s" % (pageIconPath, targetDomain), severity=INFO
                            )
                            self.cacheIcon(targetDomain, pageIcon.data, pageIconPath)
                            pageIcon.location = pageIconPath
                            return pageIcon
                else:
                    if refresh:
                        # No link tag: follow a meta-refresh redirect once.
                        for meta in pageSoup.findAll("meta"):
                            if meta.get("http-equiv", "").lower() == "refresh":
                                match = search("url=([^;]+)", meta.get("content", ""), flags=IGNORECASE)
                                if match:
                                    refreshPath = urljoin(rootDomainPageResult.geturl(), match.group(1))
                                    cherrypy.log(
                                        "Processing refresh directive:%s for domain:%s" % (refreshPath, targetDomain),
                                        severity=INFO,
                                    )
                                    return self.iconInPage(targetDomain, refreshPath, start, refresh=False)
                    cherrypy.log("No link tag found:%s" % targetPath, severity=INFO)
            else:
                cherrypy.log(
                    "Non-success response:%d for url:%s" % (rootDomainPageResult.getcode(), targetPath), severity=INFO
                )
        except:
            cherrypy.log(
                "Error extracting favicon from page:%s, err:%s, msg:%s"
                % (targetPath, sys.exc_info()[0], sys.exc_info()[1]),
                severity=WARNING,
            )

    def cacheIcon(self, domain, icon, loc):
        # Store raw icon bytes under "icon-<domain>" with a TTL.
        cherrypy.log("Caching icon at location:%s for domain:%s" % (loc, domain), severity=INFO)
        if not self.mc.set("icon-%s" % domain, icon, time=MC_CACHE_TIME):
            cherrypy.log("Could not cache icon for domain:%s" % domain, severity=ERROR)

    def iconInCache(self, targetDomain, start):
        """Return the cached Icon for *targetDomain*, the default icon for a
        cached "DEFAULT" sentinel, or None on a cache miss."""
        icon = self.mc.get("icon-%s" % targetDomain)
        if icon:
            self.mc.incr("counter-hits")
            cherrypy.log("Cache hit:%s" % targetDomain, severity=INFO)
            cherrypy.response.headers["X-Cache"] = "Hit"
            if icon == "DEFAULT":
                self.mc.incr("counter-defaults")
                cherrypy.response.headers["X-Cache"] = "Hit"
                return self.default_icon
            else:
                return Icon(data=icon)

    def writeIcon(self, icon):
        self.writeHeaders(icon)
        return icon.data

    def writeHeaders(self, icon, fmt="%a, %d %b %Y %H:%M:%S %z"):
        # MIME Type
        cherrypy.response.headers["Content-Type"] = icon.type or "image/x-icon"

        # Set caching headers
        cherrypy.response.headers["Cache-Control"] = "public, max-age=2592000"
        cherrypy.response.headers["Expires"] = (datetime.now() + timedelta(days=30)).strftime(fmt)

    def parse(self, url):
        """Decode *url* and split it into (targetPath, targetDomain).
        Raises HTTP 400 for malformed URLs."""
        # Get page path
        targetPath = self.urldecode(url)

        if not targetPath.startswith("http"):
            targetPath = "http://%s" % targetPath

        cherrypy.log("Decoded URL:%s" % targetPath, severity=INFO)

        # Split path to get domain
        targetURL = urlparse(targetPath)
        if not targetURL or not targetURL.scheme or not targetURL.netloc:
            raise cherrypy.HTTPError(400, "Malformed URL:%s" % url)

        targetDomain = "%s://%s" % (targetURL.scheme, targetURL.netloc)
        cherrypy.log("URL:%s, domain:%s" % (targetPath, targetDomain), severity=INFO)

        return (targetPath, targetDomain)

    @cherrypy.expose
    def index(self):
        # Health/stats endpoint: JSON dump of the memcached counters.
        status = {"status": "ok", "counters": dict()}
        for counter in ["requests", "hits", "defaults"]:
            status["counters"][counter] = self.mc.get("counter-%s" % counter)
        return json.dumps(status)

    @cherrypy.expose
    def test(self):
        # Render a test page listing popular sites.
        topSites = open(os.path.join(cherrypy.config["favicon.root"], "topsites.txt"), "r").read().split()
        template = self.env.get_template("test.html")
        return template.render(topSites=topSites)

    @cherrypy.expose
    def clear(self, url):
        # NOTE(review): this deletes "icon_loc-<domain>", but icons are
        # cached under "icon-<domain>" (see cacheIcon/iconInCache), so this
        # eviction appears to miss the actual cache entry -- confirm whether
        # another component uses the "icon_loc-" keyspace.
        cherrypy.log("Incoming cache invalidation request:%s" % url, severity=INFO)

        targetPath, targetDomain = self.parse(str(url))
        self.mc.delete("icon_loc-%s" % targetDomain)

        cherrypy.log("Evicted cache entry for %s" % targetDomain, severity=INFO)

    @cherrypy.expose
    def s(self, url, skipCache="false"):
        """Main favicon endpoint: cache -> page link -> root -> default."""
        start = time()

        if skipCache.lower() == "true":
            skipCache = True
        else:
            skipCache = False

        cherrypy.log("Incoming request:%s (skipCache=%s)" % (url, skipCache), severity=INFO)

        self.mc.incr("counter-requests")

        targetPath, targetDomain = self.parse(str(url))

        icon = (
            (not skipCache and self.iconInCache(targetDomain, start))
            or self.iconInPage(targetDomain, targetPath, start)
            or self.iconAtRoot(targetDomain, start)
        )

        if not icon:
            cherrypy.log("Falling back to default icon for:%s" % targetDomain, severity=INFO)
            # Cache the sentinel so future misses resolve immediately.
            self.cacheIcon(targetDomain, "DEFAULT", "DEFAULT_LOC")
            self.mc.incr("counter-defaults")
            icon = self.default_icon

        cherrypy.log("Time taken to process domain:%s %f" % (targetDomain, time() - start), severity=INFO)

        return self.writeIcon(icon)
class MemcachedCacheClient(CacheClient):
    """Memcached cache client implementation.

    Maps the generic CacheClient operations (put/get/delete/clear) onto a
    python-memcached ``Client``.  Versioned writes are emulated through
    memcached's CAS mechanism; python-memcached does not expose the server
    status line, so failure causes can only be guessed at.
    """

    def __init__(self, config):
        """Connect to the memcached server described by ``config``.

        config: mapping with at least "host", "port" and "cache" entries.
        """
        super(MemcachedCacheClient, self).__init__(config["host"], config["port"], config["cache"])
        self.config = config
        # memcached exposes a single server-wide namespace; a configured
        # cache name cannot be honoured, so warn and ignore it.
        if self.cache_name and self.cache_name != "" and self.cache_name != DEFAULT_MEMCACHED_CACHE_NAME:
            print("WARNING: memcached client doesn't support named caches. cache_name config value will be ignored and the cache name configured on the server will be used instead.")
        self.memcached_client = Client([self.host + ':' + self.port], debug=0)

    def put(self, key, value, version=None, lifespan=None, max_idle=None, put_if_absent=False):
        """Store ``value`` under ``key``.

        version: optional integer CAS version; when given, the write only
            succeeds if the server-side version still matches.
        lifespan: expiry in seconds; memcached supports at most 30 days.
        max_idle: not supported by memcached -- passing it is an error.
        put_if_absent: when True, fail if the key already exists.
        Errors are reported through self._error (raises CacheClientError).
        """
        expire = 0
        if lifespan is not None:
            if lifespan > MEMCACHED_LIFESPAN_MAX_SECONDS:
                self._error("Memcached cache client supports lifespan values only up to %s seconds (30 days)." % MEMCACHED_LIFESPAN_MAX_SECONDS)
            expire = lifespan
        if max_idle is not None:
            self._error("Memcached cache client doesn't support max idle time setting.")
        try:
            if version is None:
                if put_if_absent:
                    # add() fails when the key already exists.
                    if not self.memcached_client.add(key, value, expire, 0):
                        # python-memcached doesn't report the server status
                        # (NOT_STORED etc.), so the cause is a guess.
                        self._error("Operation unsuccessful. Possibly CONFLICT.")
                else:
                    if not self.memcached_client.set(key, value, expire, 0):
                        self._error("Operation unsuccessful.")
            else:
                # Emulate a versioned write via memcached CAS: seed the
                # client's cas_ids table with the caller-supplied version.
                try:
                    self.memcached_client.cas_ids[key] = int(version)
                except ValueError:
                    self._error("Please provide an integer version.")
                if not self.memcached_client.cas(key, value, expire, 0):
                    # EXISTS vs NOT_FOUND is not distinguishable here.
                    self._error("Operation unsuccessful. Possibly CONFLICT, NOT_FOUND.")
        except CacheClientError:
            raise  # already a cache error -- propagate with its traceback
        except Exception as e:
            self._error(e)

    def get(self, key, get_version=False):
        """Return the value stored under ``key``.

        get_version: when True, return ``(version, value)`` using the CAS id
            recorded by gets().
        Raises NotFoundError when the key is missing.
        """
        try:
            if get_version:
                # gets() records the CAS id in memcached_client.cas_ids.
                val = self.memcached_client.gets(key)
                if val is None:
                    raise NotFoundError
                # .get() instead of [key]: a missing entry should surface as
                # the message below, not as a bare KeyError.
                version = self.memcached_client.cas_ids.get(key)
                if version is None:
                    self._error("Couldn't obtain version info from memcached server.")
                return version, val
            else:
                val = self.memcached_client.get(key)
                if val is None:
                    raise NotFoundError
                return val
        except CacheClientError:
            raise  # rethrow with original traceback
        except Exception as e:
            self._error(e.args)

    def delete(self, key, version=None):
        """Delete ``key``; versioned deletes are unsupported by memcached."""
        try:
            if version:
                self._error("versioned delete operation not available for memcached client")
            # BUG FIX: the old code treated a *successful* delete as a
            # failure and then read ``last_set_status`` -- an attribute
            # python-memcached never sets (see the notes in put()) -- so
            # every successful delete raised AttributeError and was reported
            # as an error.  delete() returns a falsy value on failure; a
            # missing key is not distinguishable from other failures here.
            if not self.memcached_client.delete(key, 0):
                self._error("Operation unsuccessful.")
        except CacheClientError:
            raise  # rethrow with original traceback
        except Exception as e:
            self._error(e.args)

    def clear(self):
        """Flush every entry on the memcached server."""
        try:
            self.memcached_client.flush_all()
        except CacheClientError:
            raise  # rethrow with original traceback
        except Exception as e:
            self._error(e.args)
from memcache import Client

# Installing memcache can fail complaining about libevent; fix with:
#   yum install libevent
#   yum install libevent-devel

# A list of servers -- this is how you point the client at a memcache cluster.
MC_SERVERS = ['192.168.52.3:11211', '192.168.52.3:11212']
CONN = Client(MC_SERVERS)

# Exercise the basic commands in order, printing each command's result.
for _method, _args in (
    ('set', ('key1', 'val2', 0)),
    ('delete', ('key',)),
    ('add', ('key', 'val', 20)),
    ('replace', ('key', 'val1', 0)),
    ('append', ('key', ',val2')),
    ('get', ('key',)),
):
    print(getattr(CONN, _method)(*_args))