def whereyoulive_sum():
    global addresses
    ret = "<table class=\"normal\">"
    ret += "<th class=\"normal\">" + "Address" + "</th>"
    ret += "<th class=\"normal\">" + "Total/Address" + "</th>"
    conn = Client([gethostname() + ':11211'])
    vals = []
    for k in addresses:
        ret += "<tr class=\"normal\">"
        ret += "<td class=\"normal\">" + k + "</td>"
        val = conn.get(addr_prefix + k)
        if val is None:
            vals.append(0)
        else:
            vals.append(val)
        ret += "<td class=\"normal\"><span>" + str(vals[-1]) + "</span></td>"
        ret += "</tr>"
    ret += "<tr align=\"center\"><td class=\"normal\">" + "Sum" + "</td>"
    if len(vals) == 0:
        ret += "<td class=\"normal\">" + "0" + "</td></tr>"
    else:
        ret += "<td class=\"normal\">" + str(sum(vals)) + "</td></tr>"
    ret += "</table>"
    return ret
class MemcacheStoreTest(unittest.TestCase, SimpleKVTest):
    def setUp(self):
        self.mc = Client([testconf.get('memcache', 'server')])
        self.mc.flush_all()
        self.store = MemcacheStore(self.mc)

    def tearDown(self):
        self.mc.flush_all()

    def test_memcache_connection(self):
        pass

    # Disabled tests (the memcache API does not fully support them).
    test_has_key = None
    test_has_key_with_delete = None
    test_key_iterator = None
    test_keys = None

    def test_keys_throws_io_error(self):
        with self.assertRaises(IOError):
            self.store.keys()
        with self.assertRaises(IOError):
            self.store.iter_keys()
        with self.assertRaises(IOError):
            iter(self.store)

    def test_contains_throws_io_error_or_succeeds(self):
        try:
            'a' in self.store
        except IOError:
            pass
def remove(self, layer, coord, format):
    """ Remove a cached tile.
    """
    mem = Client(self.servers)
    key = tile_key(layer, coord, format, self.revision)
    mem.delete(key)
class MemcachedZipState(object):
    """Get or set the state of a zip job."""

    def __init__(self, uid):
        self._uid = uid
        if ZIPSTATE_MEMCACHEDSERVER not in os.environ:
            raise ValueError(
                'Expected environment variable "{0}" pointing to a memcached '
                'server in order to share state '
                'information.'.format(ZIPSTATE_MEMCACHEDSERVER)
            )
        self._client = Client([os.environ[ZIPSTATE_MEMCACHEDSERVER]])

    def _combined_key(self, key):
        return '{0}-{1}'.format(self._uid, key)

    def __getitem__(self, key):
        """Get state of the zip job."""
        return self._client.get(self._combined_key(key))

    def __setitem__(self, key, value):
        """Set state of the zip job."""
        return self._client.set(self._combined_key(key), value)
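# Hedged usage sketch for MemcachedZipState: assumes ZIPSTATE_MEMCACHEDSERVER
# is the literal name of the environment variable, a memcached server is
# listening locally, and `os` is imported; the uid and key are invented.
os.environ['ZIPSTATE_MEMCACHEDSERVER'] = '127.0.0.1:11211'
state = MemcachedZipState('job-42')
state['progress'] = 10  # stored under the combined key "job-42-progress"
assert state['progress'] == 10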
class SocketHandler(websocket.WebSocketHandler):
    client_infos = defaultdict()

    def __init__(self, *args, **kwargs):
        super(SocketHandler, self).__init__(*args, **kwargs)
        servers = ['127.0.0.1:11211']
        self._mc = Client(servers)

    def open(self):
        self.client_infos[self] = ClientInfo()

    def on_close(self):
        del self.client_infos[self]

    def on_message(self, message):
        now = datetime.now()
        delta = now - self.client_infos[self].prev_message_time
        # TODO: Restrict access based on delta.
        time = self._mc.get('time')
        if self.client_infos[self].prev_image_time == time:
            self.write_message({'timeout': 50})
        else:
            image = self._mc.get('image')
            image = base64.b64encode(image)
            self.write_message({'image': image, 'timeout': 80})
            self.client_infos[self].prev_image_time = time
        self.client_infos[self].prev_message_time = now

    def check_origin(self, origin):
        return True
def save(self, body, layer, coord, format):
    """ Save a cached tile.
    """
    mem = Client(self.servers)
    key = tile_key(layer, coord, format, self.revision)
    mem.set(key, body, layer.cache_lifespan or 0)
class FoursquareAPI:
    url_base = "https://api.foursquare.com/v2/%s"
    payload = {}
    mc = False
    mcprefix = False

    def __init__(self, lifestream):
        OAUTH_FILENAME = lifestream.config.get("foursquare", "secrets_file")
        CONSUMER_KEY = lifestream.config.get("foursquare", "client_id")
        CONSUMER_SECRET = lifestream.config.get("foursquare", "secret")
        MEMCACHE_HOST = lifestream.config.get("memcache", "host")
        MEMCACHE_PORT = lifestream.config.get("memcache", "port")
        self.mcprefix = lifestream.config.get("memcache", "prefix")
        servers = ["%s:%s" % (MEMCACHE_HOST, MEMCACHE_PORT)]
        self.mc = Client(servers, debug=1)
        if not os.path.exists(OAUTH_FILENAME):
            logger.error("No OAUTH found at %s" % OAUTH_FILENAME)
            raise Exception("You need to run foursquare_oauth.py to generate the oauth key")
        oauth_token, oauth_token_secret = read_token_file(OAUTH_FILENAME)
        self.payload = {'v': "20170801", 'oauth_token': oauth_token}

    def cache_get(self, url, params):
        # Key the cache on a hash of the requested URL plus its parameters.
        m = hashlib.sha224()
        m.update(url)
        m.update(str(params))
        key = m.hexdigest()
        res = self.mc.get(key)
        if res:
            return json.loads(res)
        # Fetch the URL that was actually requested (it was previously
        # hardcoded to users/self/checkins, which broke search_near).
        r = requests.get(url, params=params)
        self.mc.set(key, json.dumps(r.json()))
        return r.json()

    def my_checkins(self):
        return self.cache_get(self.url_base % "users/self/checkins",
                              params=self.payload)

    def search_near(self, lat, lng, intent="checkin", radius=50, limit=10):
        # Copy the base payload so the extra query parameters do not leak
        # into other requests.
        payload = dict(self.payload)
        payload['ll'] = "%s,%s" % (lat, lng)
        payload['intent'] = intent
        payload['radius'] = radius
        payload['limit'] = limit
        return self.cache_get(self.url_base % "venues/search", params=payload)
def remove(self, layer, coord, format):
    """ Remove a cached tile.
    """
    mem = Client(self.servers)
    key = tile_key(layer, coord, format, self.revision, self.key_prefix)
    mem.delete(key)
    mem.disconnect_all()
def unlock(self, layer, coord, format):
    """ Release a cache lock for this tile.
    """
    mem = Client(self.servers)
    key = tile_key(layer, coord, format, self.revision, self.key_prefix)
    mem.delete(key + "-lock")
    mem.disconnect_all()
def save(self, body, layer, coord, format):
    """ Save a cached tile.
    """
    mem = Client(self.servers)
    key = tile_key(layer, coord, format, self.revision, self.key_prefix)
    mem.set(key, body, layer.cache_lifespan or 0)
    mem.disconnect_all()
def preset(sample):
    global addresses
    conn = Client([gethostname() + ':11211'])
    if conn.get_stats()[0][1]['curr_items'] == 0:
        for k in addresses:
            conn.set(addr_prefix + k, 0)
def read(self, layer, coord, format):
    """ Read a cached tile.
    """
    mem = Client(self.servers)
    key = tile_key(layer, coord, format, self.revision, self.key_prefix)
    value = mem.get(key)
    mem.disconnect_all()
    return value
def save(self, body, layer, coord, format):
    """ Save a cached tile.
    """
    mem = Client(self.servers)
    key = tile_key(layer, coord, format, self.revision, self.key_prefix)
    if body is not None:
        body = b64encode(body).decode('ascii')
    mem.set(key, body, layer.cache_lifespan or 0)
    mem.disconnect_all()
def read(self, layer, coord, format):
    """ Read a cached tile.
    """
    mem = Client(self.servers)
    key = tile_key(layer, coord, format, self.revision, self.key_prefix)
    value = mem.get(key)
    mem.disconnect_all()
    if value is None:
        return None
    return b64decode(value.encode('ascii'))
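# Why the base64 round-trip in the save/read pair above: encoding the raw tile
# bytes to ASCII before set() keeps the cached value a plain string that
# survives the client's serialization untouched. A self-contained sketch of
# the same idea (function names invented for illustration):
from base64 import b64encode, b64decode

def encode_tile(body):
    return b64encode(body).decode('ascii') if body is not None else None

def decode_tile(value):
    return b64decode(value.encode('ascii')) if value is not None else None

assert decode_tile(encode_tile(b'\x89PNG...')) == b'\x89PNG...'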
class WrappedClient(object):
    def __init__(self, *args):
        self.args = args
        self.mc = Client(*args, cache_cas=True, socket_timeout=10)
        self.del_que = []
        import threading

    def gets(self, key):
        while True:
            result = self.mc.gets(key)
            if isinstance(result, tuple):
                return result[0]
            return result

    def cas(self, key, value):
        retry_count = 0
        try:
            while True:
                result = self.mc.cas(key, value)
                # A non-bool result means the call failed at the wire level;
                # back off exponentially and rebuild the client.
                if not isinstance(result, bool):
                    if retry_count <= 10:
                        retry_count += 1
                        wait_time = 0.001 * randint(0, 1 << retry_count)
                        print("cas failed, sleeping before retry")
                        sleep(wait_time)
                        self.mc = Client(*self.args, cache_cas=True,
                                         socket_timeout=10)
                        continue
                    # raise ConnectionError
                return result
        except TypeError:
            return False

    def add(self, key, value):
        retry_count = 0
        while True:
            result = self.mc.add(key, value)
            if not isinstance(result, bool):
                if retry_count <= 10:
                    retry_count += 1
                    wait_time = 0.001 * randint(0, 1 << retry_count)
                    print("add failed, sleeping before retry")
                    sleep(wait_time)
                    print(self.args)
                    self.mc = Client(*self.args, cache_cas=True,
                                     socket_timeout=10)
                    continue
                # raise ConnectionError
            return result

    # delegation
    def __getattr__(self, attrname):
        return getattr(self.mc, attrname)
class MemcachedManager():
    class Prefix():
        Session = 'SESSION'
        SysCache = 'ORDER_SYS_CACHE'

    def __init__(self):
        # if settings.ENVIRONMENT['dev']:
        #     pass
        # else:
        self.conn = Client(settings.CACHE['clients'])

    def get(self, *arg):
        if len(arg) == 0:
            return None
        arg = [str(i) for i in arg]
        key = PRODUCT_PREFIX + '|'.join(arg)
        try:
            # if settings.ENVIRONMENT['dev']:
            #     return _localCache.get(key)
            # else:
            return self.conn.get(key)
        except:  # Client.MemcachedKeyNoneError
            return None

    def set(self, value, timeout, *arg):
        if len(arg) == 0:
            return None
        arg = [str(i) for i in arg]
        key = PRODUCT_PREFIX + '|'.join(arg)
        # if settings.ENVIRONMENT['dev']:
        #     _localCache[key] = value
        # else:
        self.conn.set(key, value, timeout)

    def delete(self, *arg):
        if len(arg) == 0:
            return None
        arg = [str(i) for i in arg]
        key = PRODUCT_PREFIX + '|'.join(arg)
        try:
            # if settings.ENVIRONMENT['dev']:
            #     if _localCache.has_key(key):
            #         _localCache.pop(key)
            # else:
            self.conn.delete(key)
        except:  # Client.MemcachedKeyNoneError
            return None
def __init__(self, urls, controllers, config=None, auth_class=Authentication):
    super(StorageServerApp, self).__init__(urls, controllers, config,
                                           auth_class)
    self.config = config
    # Collecting the host-specific config and building connectors.
    self.storages = {'default': get_storage(config)}
    hostnames = set()
    host_token = 'host:'
    for cfgkey in config:
        if cfgkey.startswith(host_token):
            # Get the hostname from the config key. This assumes
            # that host-specific keys have two trailing components
            # that specify the setting to override.
            # E.g: "host:localhost.storage.sqluri" => "localhost"
            hostname = cfgkey[len(host_token):].rsplit(".", 2)[0]
            hostnames.add(hostname)
    for hostname in hostnames:
        host_cfg = self._host_specific(hostname, config)
        self.storages[hostname] = get_storage(host_cfg)
    self.check_blacklist = \
        self.config.get('storage.check_blacklisted_nodes', False)
    if self.check_blacklist and Client is not None:
        servers = self.config.get('storage.cache_servers', '127.0.0.1:11211')
        self.cache = Client(servers.split(','))
    else:
        if self.check_blacklist:
            raise ValueError('The "check_blacklisted_node" option '
                             'needs a memcached server')
        self.cache = None
def __init__(self, config):
    super(MemcachedCacheClient, self).__init__(config["host"], config["port"],
                                               config["cache"])
    self.config = config
    if self.cache_name and self.cache_name != "" and \
            self.cache_name != DEFAULT_MEMCACHED_CACHE_NAME:
        print("WARNING: the memcached client doesn't support named caches. "
              "The cache_name config value will be ignored and the cache "
              "name configured on the server will be used instead.")
    self.memcached_client = Client([self.host + ':' + self.port], debug=0)
class Store(object):
    def __init__(self, host='127.0.0.1', port=11211):
        self.client = Client(['%s:%d' % (host, port)],
                             pickler=JSONPickler,
                             unpickler=JSONUnpickler)

    def get(self, key):
        result = self.client.get(key)
        if result:
            result = result.split('\t')
        return result

    def set(self, key, val):
        # add() only succeeds for new keys; for an existing key, append the
        # value as another tab-separated entry instead of overwriting.
        result = self.client.add(key, val)
        if not result:
            result = self.client.append(key, '\t%s' % val)
        return result
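# Hedged usage sketch for the Store above (assumes a local memcached). set()
# never overwrites: the first write uses add(), later writes append a
# tab-separated entry, so get() is intended to return every value written
# under the key. Note this relies on the configured JSON pickler
# round-tripping plain strings transparently.
store = Store()
store.set('events', 'login')
store.set('events', 'logout')
print(store.get('events'))  # intended result: ['login', 'logout']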
class MemcacheStore(DictProxy):
    def __init__(self, host, port):
        from memcache import Client
        self.mc = Client(['%s:%s' % (host, port)])

    def update(self, updates):
        for update in updates:
            (processId, data) = update
            self.mc.set(processId, data)

    def get(self, processId, default=None):
        data = self.mc.get(processId)
        if data is None:
            return default
        return data

    def pop(self, processId, default=None):
        # Read the value before deleting it; fall back to the default when
        # the key was missing (the original returned before this check and
        # referenced an undefined `default`).
        data = self.mc.get(processId)
        self.mc.delete(processId)
        if data is None:
            return default
        return data

    def __len__(self):
        return int(self.mc.get_stats()[0][1].get('curr_items'))
class Memcache(object):
    """
    address=127.0.0.1:11211
    # TODO: there is no automatic reconnect after the connection drops
    """

    def __init__(self, address):
        from memcache import Client
        self.conn = Client([address])

    def get(self, key):
        return self.conn.get(key)

    def set(self, key, value, timeout):
        self.conn.set(key, value, timeout)

    def delete(self, key):
        self.conn.delete(key)
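# A minimal sketch of one way to address the TODO above (no automatic
# reconnect): python-memcached's set() returns a falsy value when the server
# connection is dead instead of raising, so a wrapper can rebuild the Client
# on failure. This subclass and its retry count are illustrative assumptions,
# not part of the original code.
class ReconnectingMemcache(Memcache):
    def __init__(self, address):
        super(ReconnectingMemcache, self).__init__(address)
        self._address = address

    def set(self, key, value, timeout):
        from memcache import Client
        for _attempt in range(2):
            if self.conn.set(key, value, timeout):
                return True
            # set() reported failure: drop dead sockets, build a fresh client
            self.conn.disconnect_all()
            self.conn = Client([self._address])
        return False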
def __init__(self, hosts):
    """
    Feature storage implemented on top of memcache.

    :param list hosts: list of hosts, either
        1. strings of the form ``"host:port"``, or
        2. tuples of the form ``("host:port", weight)``

    :rtype: :class:`MemcacheFeatureStorage`
    """
    self._client = Client(servers=hosts)
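# Hedged example of the two host formats the docstring above accepts; the
# weight in the tuple form biases the key distribution toward that server.
# (The class name is taken from the :rtype: in the docstring; treat it as an
# assumption about the surrounding code.)
plain = MemcacheFeatureStorage(hosts=['127.0.0.1:11211'])
weighted = MemcacheFeatureStorage(hosts=[('127.0.0.1:11211', 3),
                                         ('127.0.0.1:11212', 1)])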
def __init__(self, *args):
    from memcache import Client
    self.mc = Client(*args)
    self.del_que = []
    self.random = Random()
    self.random.seed()
    from threading import Thread
    self.del_thread = Thread(target=lambda: self._async_delete())
    self.del_thread.setDaemon(True)
    self.del_thread.start()
class MemCacheWrapper(object):
    """
    Memcache client wrapper: never raises, and adds a few useful helpers.
    """

    def __init__(self, servers, logerr=None):
        self.cache = MCClient(servers=servers, debug=False)
        self.logerr = logerr

    def add(self, key, val=1, time=0):
        try:
            return self.cache.add(key, val, time)
        except Exception as e:
            _logger.warning("Exception during `add`: %s", e)
            return None

    def count(self, key, expires=0, delta=1):
        # incr() returns None when the key does not exist yet, so fall back
        # to add(); if another client added it first, incr() again.
        try:
            result = self.cache.incr(key, delta)
            if result is None:
                if not self.cache.add(key, delta, expires):
                    result = self.cache.incr(key, delta)
                else:
                    return delta
            return result
        except Exception as e:
            _logger.warning("Exception during `count`: %s", e)
            return None

    def get(self, key):
        result = None
        try:
            result = self.cache.get(str(key))
        except Exception as e:
            _logger.warning("Exception during `get`: %s", e)
        return result

    def set(self, key, value, expires):
        result = False
        try:
            result = self.cache.set(str(key), value, expires)
        except Exception as e:
            _logger.warning("Exception during `set`: %s", e)
        return result

    def delete(self, key):
        result = False
        try:
            result = self.cache.delete(key)
        except Exception as e:
            _logger.warning("Exception during `del`: %s", e)
        return result
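# Hedged usage sketch: count() is an atomic add-or-increment, which makes the
# wrapper handy as e.g. a windowed request counter (the key name and the
# threshold below are invented for illustration).
mcw = MemCacheWrapper(servers=['127.0.0.1:11211'])
hits = mcw.count('requests:window-1200', expires=120)
if hits is not None and hits > 100:
    print('rate limit exceeded')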
class MemcacheBackend(StorageBackend):
    def __init__(self, options, args):
        if not options.servers:
            raise Exception("memcache servers are required")
        self.servers = options.servers.split(",")
        self.open()

    @classmethod
    def parse_arguments(cls, optparse):
        optparse.add_option(
            "-m", "--servers",
            dest="servers",
            help="comma-separated list of memcached servers",
            metavar="SERVERS",
            default="localhost:11211",
        )

    def _encode_key(self, key):
        return key.encode("base64").rstrip("\n")

    def _decode_key(self, key):
        return key.decode("base64")

    def _get(self, key, default=None):
        return self.mc.get(self._encode_key(key))

    def _get_multi(self, keys):
        keys = map(self._encode_key, keys)
        ret = self.mc.get_multi(keys)
        return dict((self._decode_key(key), value)
                    for (key, value) in ret.iteritems())

    def _put(self, key, val):
        return self.mc.set(self._encode_key(key), val)

    def _put_multi(self, keys):
        keys = dict((self._encode_key(key), value)
                    for (key, value) in keys.iteritems())
        self.mc.set_multi(keys)

    def _delete(self, key):
        self.mc.delete(self._encode_key(key))

    def close(self):
        if getattr(self, "mc", None):
            self.mc.disconnect_all()
            self.mc = None

    def open(self):
        self.close()
        self.mc = MemcacheClient(self.servers)

    def stats(self):
        return dict(self.mc.get_stats())
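# Why the backend base64-encodes keys: memcached rejects keys containing
# spaces or control characters and caps key length at 250 bytes, so arbitrary
# keys must be made wire-safe first. The snippet above is Python 2
# (str.encode("base64")); a Python 3 equivalent of the codec pair would look
# like this (a sketch, not from the source):
from base64 import b64encode, b64decode

def encode_key(key):
    return b64encode(key.encode('utf-8')).decode('ascii')

def decode_key(key):
    return b64decode(key.encode('ascii')).decode('utf-8')

assert decode_key(encode_key('a key with spaces')) == 'a key with spaces'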
def __init__(self, servers=["127.0.0.1:11211"], expire=0, debug=False):
    """
    :param servers: List of servers to use. Please read the
        memcache.Client help.
    """
    self._client = MemcacheClient(servers)
    self._expire = expire
    logging.basicConfig(level=logging.WARNING)
    self.log = logging.getLogger("Memcache-Gateway")
    if debug:
        self.log.setLevel(logging.DEBUG)
class OODictMongoMemcache(OODictMongo):
    """Add memcache caching capabilities to an OODictMongo."""

    def __init__(self, memcache_host, mongo_uri, mongo_db, mongo_coll,
                 mongo_id="_id", mongo_client=None, memcache_client=None,
                 _dict=None):
        super(OODictMongoMemcache, self).__init__(
            mongo_uri, mongo_db, mongo_coll, mongo_id, mongo_client, _dict
        )
        self._memcache_host = memcache_host
        ## self._memcache_lock = memcache_lock
        if memcache_client is None:
            self._memcache = MemcacheClient(memcache_host)
        else:
            self._memcache = memcache_client

    def _memcache_key(self, mongo_id=None):
        return str("%s:%s:%s" % (self._mongo_db, self._mongo_coll,
                                 mongo_id or self._dict.get(self._mongo_id, '')))

    def get_from_field(self, key, value, flush=False):
        """Get user by a key:value pair from mongo or memcache."""
        # If searching by the id key, we may find the item in memcache.
        if not flush and key == self._mongo_id:
            item = self._memcache.get(self._memcache_key(value))
            if item:
                log.info("Cache hit.")
                return self._reinit(item)
            log.info("Cache miss.")
        # Didn't find it in memcache; search in mongo and update the cache.
        super(OODictMongoMemcache, self).get_from_field(key, value)
        item = self._dict
        if item:
            self._memcache.set(self._memcache_key(), item)
            return self._reinit(item)

    def refresh(self, flush=False):
        """Refresh self data from memcache.

        If flush is True, flush the memcache entry and force a refresh
        from mongo.
        """
        self.get_from_field(self._mongo_id, self._dict[self._mongo_id], flush)

    def save(self):
        """Save user data to storage."""
        self._memcache.set(self._memcache_key(), self._dict)
        super(OODictMongoMemcache, self).save()

    def delete(self):
        """Delete user from storage."""
        self._memcache.delete(self._memcache_key())
        super(OODictMongoMemcache, self).delete()
class MemcachedCache(CacheBase):
    def __init__(self, config, section):
        from memcache import Client
        servers = config.get(section, 'memcache-server').split()
        self.conn = Client(servers)

    def key(self, raw):
        if sys.version_info >= (3,):
            return self.prefix(hashlib.md5(bytes(raw, 'utf-8')).hexdigest())
        else:
            return self.prefix(hashlib.md5(raw).hexdigest()).encode('utf-8')

    def check_password(self, user, password):
        cached = self.conn.get(self.key('%s-pass' % user))
        if cached is None:
            return cached
        return cached == self.hash(password, cached)

    def set_password(self, user, password):
        self.conn.set(self.key('%s-pass' % user),
                      self.hash(password, None), self.expire)

    def in_groups(self, user, groups):
        cached = self.conn.get(self.key('%s-groups' % user))
        if cached is None:
            return None
        return not cached.isdisjoint(groups)

    def set_groups(self, user, groups):
        self.conn.set(self.key('%s-groups' % user), groups, self.expire)
def __enter__(self):
    self.conn = Client(*self.args, **self.kwargs)
    return self.conn
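# __enter__ above implies a context-manager wrapper around Client; the
# matching __exit__ would tear the connection down. A sketch, assuming the
# class stores its Client arguments in self.args/self.kwargs as shown:
def __exit__(self, exc_type, exc_value, traceback):
    self.conn.disconnect_all()
    return False  # propagate any exception raised inside the with-block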
# from redis import Redis
from memcache import Client
from datetime import datetime

# port = '11211'
# host = 'localhost'
memcached_port = '9150'
redis_port = '9250'
password = ''
hostname = 'ec2-54-183-15-168.us-west-1.compute.amazonaws.com'
mc = Client([hostname + ':' + memcached_port], debug=0)
# mc = Redis(host=hostname, port=redis_port, password=password)

start_time = datetime.now()

mc.set("first_key", "first value")
value = mc.get("first_key")
print(value)

mc.set("second_key", 3)
mc.delete("second_key")

mc.set("key", "1")  # note that the value stored at a key used for
                    # incr/decr must be a string.
value = mc.get('key')
print(value)
mc.incr("key")
value = mc.get('key')
print(value)
mc.decr("key")
from memcache import Client
import pandas as pd

memcached_port = '9150'
hostname = 'ec2-54-183-15-168.us-west-1.compute.amazonaws.com'
mc = Client([hostname + ':' + memcached_port], debug=0)  # define the memcached client

# Read the csv file into a pandas dataframe.
df = pd.read_csv("SO2 Emissions Key Value Cache.csv")

# Write each row of the dataframe as a key-value pair in memcached.
for key in range(len(df)):
    mc.set(str(key) + 'Ali', df.loc[key, ])

# Read the first item of the database into a new pandas dataframe.
read_df = pd.DataFrame(mc.get('0Ali')).transpose()

# Read all remaining items into the same dataframe.
for key in range(1, len(df)):
    read_df = read_df.append(mc.get(str(key) + 'Ali'))

# Write the new dataframe to a new csv file.
read_df.to_csv("New_SO2 Emissions Key Value Cache.csv")
def set_multi(self, mapping, **kw):
    kw['key_prefix'] = "%s%s" % (self.uniqeKey, kw.get('key_prefix', ''))
    return Client.set_multi(self, mapping, **kw)
class TestMemcache(unittest.TestCase):
    def setUp(self):
        # TODO(): unix socket server stuff
        servers = ["127.0.0.1:11211"]
        self.mc = Client(servers, debug=1)

    def tearDown(self):
        self.mc.flush_all()
        self.mc.disconnect_all()

    def check_setget(self, key, val, noreply=False):
        self.mc.set(key, val, noreply=noreply)
        newval = self.mc.get(key)
        self.assertEqual(newval, val)

    def test_setget(self):
        self.check_setget("a_string", "some random string")
        self.check_setget("a_string_2", "some random string", noreply=True)
        self.check_setget("an_integer", 42)
        self.check_setget("an_integer_2", 42, noreply=True)

    def test_delete(self):
        self.check_setget("long", int(1 << 30))
        result = self.mc.delete("long")
        self.assertEqual(result, True)
        self.assertEqual(self.mc.get("long"), None)

    @mock.patch.object(_Host, 'send_cmd')
    @mock.patch.object(_Host, 'readline')
    def test_touch(self, mock_readline, mock_send_cmd):
        with captured_stderr():
            self.mc.touch('key')
        mock_send_cmd.assert_called_with(b'touch key 0')

    def test_get_multi(self):
        self.check_setget("gm_a_string", "some random string")
        self.check_setget("gm_an_integer", 42)
        self.assertEqual(
            self.mc.get_multi(["gm_a_string", "gm_an_integer"]),
            {"gm_an_integer": 42, "gm_a_string": "some random string"})

    def test_get_unknown_value(self):
        self.mc.delete("unknown_value")
        self.assertEqual(self.mc.get("unknown_value"), None)

    def test_setget_foostruct(self):
        f = FooStruct()
        self.check_setget("foostruct", f)
        self.check_setget("foostruct_2", f, noreply=True)

    def test_incr(self):
        self.check_setget("i_an_integer", 42)
        self.assertEqual(self.mc.incr("i_an_integer", 1), 43)

    def test_incr_noreply(self):
        self.check_setget("i_an_integer_2", 42)
        self.assertEqual(self.mc.incr("i_an_integer_2", 1, noreply=True), None)
        self.assertEqual(self.mc.get("i_an_integer_2"), 43)

    def test_decr(self):
        self.check_setget("i_an_integer", 42)
        self.assertEqual(self.mc.decr("i_an_integer", 1), 41)

    def test_decr_noreply(self):
        self.check_setget("i_an_integer_2", 42)
        self.assertEqual(self.mc.decr("i_an_integer_2", 1, noreply=True), None)
        self.assertEqual(self.mc.get("i_an_integer_2"), 41)

    def test_sending_spaces(self):
        try:
            self.mc.set("this has spaces", 1)
        except Client.MemcachedKeyCharacterError as err:
            self.assertTrue("characters not allowed" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyCharacterError, nothing raised")

    def test_sending_control_characters(self):
        try:
            self.mc.set("this\x10has\x11control characters\x02", 1)
        except Client.MemcachedKeyCharacterError as err:
            self.assertTrue("characters not allowed" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyCharacterError, nothing raised")

    def test_sending_key_too_long(self):
        try:
            self.mc.set('a' * SERVER_MAX_KEY_LENGTH + 'a', 1)
        except Client.MemcachedKeyLengthError as err:
            self.assertTrue("length is >" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyLengthError, nothing raised")
        # These should work.
        self.mc.set('a' * SERVER_MAX_KEY_LENGTH, 1)
        self.mc.set('a' * SERVER_MAX_KEY_LENGTH, 1, noreply=True)

    def test_setget_boolean(self):
        """GitHub issue #75. Set/get with boolean values."""
        self.check_setget("bool", True)

    def test_unicode_key(self):
        s = u'\u4f1a'
        maxlen = SERVER_MAX_KEY_LENGTH // len(s.encode('utf-8'))
        key = s * maxlen
        self.mc.set(key, 5)
        value = self.mc.get(key)
        self.assertEqual(value, 5)

    def test_unicode_value(self):
        key = 'key'
        value = u'Iñtërnâtiônàlizætiøn2'
        self.mc.set(key, value)
        cached_value = self.mc.get(key)
        self.assertEqual(value, cached_value)

    def test_binary_string(self):
        value = 'value_to_be_compressed'
        compressed_value = zlib.compress(value.encode())

        self.mc.set('binary1', compressed_value)
        compressed_result = self.mc.get('binary1')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, zlib.decompress(compressed_result).decode())

        self.mc.add('binary1-add', compressed_value)
        compressed_result = self.mc.get('binary1-add')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, zlib.decompress(compressed_result).decode())

        self.mc.set_multi({'binary1-set_many': compressed_value})
        compressed_result = self.mc.get('binary1-set_many')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, zlib.decompress(compressed_result).decode())

    def test_ignore_too_large_value(self):
        # NOTE: "MemCached: while expecting[...]" is normal...
        key = 'keyhere'
        value = 'a' * (SERVER_MAX_VALUE_LENGTH // 2)
        self.assertTrue(self.mc.set(key, value))
        self.assertEqual(self.mc.get(key), value)

        value = 'a' * SERVER_MAX_VALUE_LENGTH
        with captured_stderr() as log:
            self.assertIs(self.mc.set(key, value), False)
        self.assertEqual(
            log.getvalue(),
            "MemCached: while expecting 'STORED', got unexpected response "
            "'SERVER_ERROR object too large for cache'\n")
        # This test fails if the -I option is used on the memcached server.
        self.assertTrue(self.mc.get(key) is None)

    def test_get_set_multi_key_prefix(self):
        """Testing set_multi() and get_multi() with a key prefix."""
        prefix = 'pfx_'
        values = {'key1': 'a', 'key2': 'b'}
        errors = self.mc.set_multi(values, key_prefix=prefix)
        self.assertEqual(errors, [])

        keys = list(values)
        self.assertEqual(self.mc.get_multi(keys, key_prefix=prefix), values)

    def test_set_multi_dead_servers(self):
        """Testing set_multi() with no memcacheds running."""
        self.mc.disconnect_all()
        with captured_stderr() as log:
            for server in self.mc.servers:
                server.mark_dead('test')
        self.assertIn('Marking dead.', log.getvalue())

        errors = self.mc.set_multi({'key1': 'a', 'key2': 'b'})
        self.assertEqual(sorted(errors), ['key1', 'key2'])

    def test_disconnect_all_delete_multi(self):
        """Testing delete_multi() with no memcacheds running."""
        self.mc.disconnect_all()
        with captured_stderr() as output:
            ret = self.mc.delete_multi(('keyhere', 'keythere'))
        self.assertEqual(ret, 1)
        self.assertEqual(
            output.getvalue(),
            "MemCached: while expecting 'DELETED', got unexpected response "
            "'NOT_FOUND'\n"
            "MemCached: while expecting 'DELETED', got unexpected response "
            "'NOT_FOUND'\n")

    @mock.patch.object(_Host, 'send_cmd')  # Don't send any commands.
    @mock.patch.object(_Host, 'readline')
    def test_touch_unexpected_reply(self, mock_readline, mock_send_cmd):
        """touch() logs an error upon receiving an unexpected reply."""
        mock_readline.return_value = 'SET'  # the unexpected reply
        with captured_stderr() as output:
            self.mc.touch('key')
        self.assertEqual(
            output.getvalue(),
            "MemCached: touch expected %s, got: 'SET'\n" % b'TOUCHED')
def __init__(self, servers=None, debug=False):
    if servers is None:
        servers = self.servers
    from memcache import Client as MemcachedClient
    self._client = MemcachedClient(servers, debug)
def __init__(self, *args, **kwargs):
    # pop() avoids the KeyError the original `del kwargs['key'], kwargs['meta']`
    # raised when either kwarg was omitted.
    self.uniqeKey = kwargs.pop('key', '')
    self.meta = kwargs.pop('meta', '')
    Client.__init__(self, *args, **kwargs)
def index(request):
    mc = Client(["127.0.0.1:11211"])  # morgan.online.ntnu.no
    lists = mc.get("sympa_lists")
    # If we couldn't find any information in memcache right now, fall back
    # to a hardcoded snapshot of the lists (all addresses are masked).
    if lists is None:
        def member(subscriber, name):
            return {'subscriber': subscriber, 'name': name,
                    'email': '*****@*****.**'}

        lists = [
            {'name': 'linjeforeninger', 'members': [
                member('', 'Alf'), member('', 'Smørekoppen'),
                member('', 'Abakus'), member('', 'Omega'),
                member('', 'Bergstuderendes Forening'), member('', 'Delta'),
                member('', 'Aarhønen'), member('', 'Hybrida'),
                member('', 'Janus'), member('', 'Online'),
                member('', 'Placebo'), member('', 'Spanskrøret'),
                member('', 'HC'), member('', 'Mannhullet'),
                member('', 'Volvox'), member('mail', 'Socius Extremus'),
                member('', 'Nabla'), member('', 'Leonardo'),
                member('mail', 'Erudio'), member('mail', 'Paideia'),
                member('mail', 'CAF'), member('mail', 'ELF'),
                member('mail', 'PSI'), member('mail', 'Katharsis'),
                member('mail', 'Geolf'), member('mail', 'Ludimus'),
                member('mail', 'De Passe Simple'), member('mail', 'Eureka'),
                member('mail', 'Sturm Und Drang'), member('mail', 'Theodor'),
                member('mail', 'Kwakiutl'), member('mail', 'Primetime'),
                member('mail', 'Akwaaba'), member('mail', 'Jump Cut'),
                member('mail', 'Panoptikon'), member('mail', 'Kultura'),
                member('mail', 'Gengangere'), member('mail', 'Pareto'),
                member('mail', 'Dionysos'), member('mail', 'De Folkevalgte'),
                member('mail', 'Apeiron'), member('mail', 'Psykolosjen'),
                member('mail', 'Det Historiske Selskab'), member('', 'Timini'),
                member('mail', 'LiMP'), member('mail', 'Ivrig'),
                member('mail', 'Emil'), member('mail', 'Solan'),
                member('mail', 'Teaterlosjen'),
            ]},
            {'name': 'dragvoll', 'members': [
                member('mail', 'Karrieredagen Dragvoll'),
                member('mail', 'Det Historiske Selskab'),
                member('mail', 'CAF'), member('mail', 'ELF'),
                member('mail', 'PSI'), member('mail', 'Katharsis'),
                member('mail', 'Geolf'), member('mail', 'De Passe Simple'),
                member('mail', 'Eureka'), member('mail', 'Sturm Und Drang'),
                member('mail', 'Theodor'), member('mail', 'Kwakiutl'),
                member('mail', 'Primetime'), member('mail', 'Akwaaba'),
                member('mail', 'Jump Cut'), member('mail', 'Panoptikon'),
                member('mail', 'Kultura'), member('', 'Teaterlosjen'),
                member('mail', 'Pareto'), member('mail', 'Dionysos'),
                member('mail', 'De Folkevalgte'), member('mail', 'Apeiron'),
                member('mail', 'Psykolosjen'), member('mail', 'Paideia'),
                member('mail', 'Socius Extremus'), member('mail', 'Erudio'),
                member('mail', 'Studentrådet SVT'), member('mail', 'LiMP'),
                member('mail', 'Ivrig'), member('mail', 'Studentrådet HF'),
                member('mail', 'Ludimus'), member('mail', 'Gengangere'),
            ]},
            {'name': 'gloshaugen', 'members': [
                member('mail', 'Hybrida'), member('mail', 'Volvox'),
                member('mail', 'Delta'), member('mail', 'Alf'),
                member('mail', 'Mannhullet'), member('mail', 'Spanskrøret'),
                member('mail', 'Aarhønen'), member('mail', 'Timini'),
                member('mail', 'Smørekoppen'), member('mail', 'Online'),
                member('mail', 'Nabla'), member('mail', 'HC'),
                member('mail', 'Janus'),
                member('mail', 'Bergstuderendes Forening'),
                member('mail', 'Emil'), member('mail', 'Leonardo'),
                member('mail', 'Omega'), member('mail', 'Placebo'),
                member('', 'Solan'), member('mail', 'Abakus'),
            ]},
            {'name': 'kjellere', 'members': [
                member('', 'Smørekoppen'),
                member('', 'Bergstuderendes Forening Kjellersjef'),
                member('', 'Hybrida'), member('', 'Dragvoll Kjelleren'),
                member('', 'ICOT'), member('', 'Emil Kjellern'),
                member('', 'Lauget'), member('', 'De Taktlause'),
                member('', 'HC'), member('', 'Timini'),
                member('', 'Dykkergruppa'), member('', 'Nabla'),
                member('', 'LaBamba'), member('', 'Omega'),
                member('', 'Mannhullet'), member('', 'Janus Kjellersjef'),
                member('', 'Realfagskjellern'), member('', 'isu-orga'),
                member('', 'Aarhønen'), member('mail', 'NTNUI'),
                member('mail', 'Psykolosjen'),
            ]},
            {'name': 'foreninger', 'members': [
                member('', 'NTNUI'), member('', 'Radio Revolt'),
                member('', 'Styret i Samfundet'), member('', 'Student-TV'),
                member('', 'Under Dusken'), member('', 'Velferdstinget'),
                member('mail', 'Linjeforeninger på NTN'),
                member('mail', 'Masterforeninger på Gløshaugen'),
                member('mail', 'Studenttinget'),
            ]},
            {'name': 'masterforeninger', 'members': [
                member('mail', 'Soma'), member('mail', 'Projeksjon'),
                member('mail', 'Symbiosis'), member('mail', 'Signifikant'),
                member('', 'Solan'), member('mail', 'Hippodamus'),
            ]},
            {'name': 'sr-samarbeid', 'members': [
                member('', 'Studentrådet IME'), member('', 'Spanskrøret'),
                member('mail', 'Delta'), member('mail', 'Online'),
                member('mail', 'Abakus'), member('mail', 'Omega'),
                member('mail', 'Nabla'), member('mail', 'Emil'),
            ]},
            {'name': 'ivt-samarbeid', 'members': [
                member('mail', 'Mannhullet'), member('mail', 'Aarhønen'),
                member('mail', 'Studentrådet IVT'), member('mail', 'Hybrida'),
                member('mail', 'Smørekoppen'), member('mail', 'BSF'),
                member('mail', 'Teknologiporten'), member('mail', 'Leonardo'),
                member('mail', 'Emil'),
            ]},
            {'name': 'linjeledere', 'members': [
                member('mail', 'Timini'), member('mail', 'Nabla'),
                member('mail', 'Placebo'), member('mail', 'Omega'),
                member('', 'Smørekoppen'), member('mail', 'Hybrida'),
                member('', 'Janus'), member('mail', 'Leonardo'),
                member('mail', 'Solan'), member('', 'Tidligere Linjeledere'),
                member('mail', 'Spanskrøret'), member('mail', 'Mannhullet'),
                member('mail', 'Abakus'), member('mail', 'Emil'),
                member('mail', 'Online'), member('mail', 'Volvox'),
                member('mail', 'Aarhønen'), member('mail', 'Berg'),
                member('mail', 'Delta'), member('mail', 'Alf'),
                member('mail', 'HC'),
            ]},
        ]
    return render(request, 'mailinglists/index.html', {'lists': lists})
def get_multi(self, keys, **kw):
    kw['key_prefix'] = "%s%s" % (self.uniqeKey, kw.get('key_prefix', ''))
    return Client.get_multi(self, keys, **kw)
from memcache import Client

from app import conf

memcached = Client([conf.MEMCACHED_HOST])
""" This is a simple app to generate random numbers for keys stored in memcached. The keys are upper case letters starting from 'A' through to 'Z' (inclusive). The key value will only be updated if a random number is less than 10 or greater than 100. """ from memcache import Client import string, random, time servers = ["127.0.0.1:11211"] mc = Client(servers, debug=1) if __name__ == '__main__': for c in string.ascii_uppercase: mc.set(c, 0, time=0) while True: for c in string.ascii_uppercase: r = random.randint(0, 100) if r < 10 or r > 90: print('Setting value for {}'.format(c), end='') v = random.randint(0, 100) mc.set(c, v, time=0) print(' -> test: v={}'.format(mc.get(c))) print('*') time.sleep(1) # EOF
def setUp(self):
    server = ["127.0.0.1:11211"]
    self.cache = Client(server)
    self.user_id = 5
class MemcachedLoaderMixin(object):
    """
    Uses a memcached server to cache the templates.

    Requires the memcache library from tummy__.

    __ http://www.tummy.com/Community/software/python-memcached/
    """

    def __init__(self, use_memcache, memcache_time=60 * 60 * 24 * 7,
                 memcache_host=None, item_prefix='template/'):
        try:
            from memcache import Client
        except ImportError:
            raise RuntimeError('the %r loader requires an installed '
                               'memcache module' % self.__class__.__name__)
        if memcache_host is None:
            memcache_host = ['127.0.0.1:11211']
        if use_memcache:
            self.__memcache = Client(list(memcache_host))
            self.__memcache_time = memcache_time
        else:
            self.__memcache = None
        self.__item_prefix = item_prefix
        self.__lock = Lock()

    def load(self, environment, name, translator):
        """
        Load and translate a template. First we check if there is a cached
        version of this template in the memory cache. If that is not the
        case, check for a compiled template in the disk cache folder. And
        if none of this is the case we translate the template, cache it
        and return it.
        """
        self.__lock.acquire()
        try:
            # Caching is only possible for the python translator; skip
            # all other translators.
            if translator is not PythonTranslator:
                return super(MemcachedLoaderMixin, self).load(
                    environment, name, translator)
            tmpl = None
            push_to_memory = False

            # Check if we have something in the memory cache and the
            # memory cache is enabled.
            if self.__memcache is not None:
                bytecode = self.__memcache.get(self.__item_prefix + name)
                if bytecode:
                    tmpl = Template.load(environment, bytecode)
                else:
                    push_to_memory = True

            # If we still have no template, load, parse and translate it.
            if tmpl is None:
                tmpl = super(MemcachedLoaderMixin, self).load(
                    environment, name, translator)

            # If memcaching is enabled and the template was not loaded
            # from it, add it there.
            if push_to_memory:
                self.__memcache.set(self.__item_prefix + name, tmpl.dump(),
                                    self.__memcache_time)
            return tmpl
        finally:
            self.__lock.release()
    userid = message._get_user_id()
    return users[userid]['name']


def direct_reply_to_message(message, text):
    channel_id = _open_n_get_message_channel_id(message)
    message._client.rtm_send_message(channel_id, text)


def _open_n_get_message_channel_id(message):
    return (message._client.webapi.im.open(
        message._get_user_id()).body['channel']['id'])


try:
    CACHE = Client(settings.MEMCACHED_SERVERS)
    CACHE_TTL = settings.MEMCACHED_TTL if settings.MEMCACHED_TTL else 0
except:
    CACHE = None


def cache_get(key):
    """
    Retrieves the value for a given key from the cache. Alternatively you
    can implement your own caching here. Caching is optional; if no caching
    is enabled, this should return None.

    :param key: A string representing the key of a key-value pair
    :return: The value for the given key, or None if it doesn't exist in
        the cache or no caching is enabled
    """
def delete(self, key, **kwargs):
    return Client.delete(self, self.uniqeKey + str(key), **kwargs)
def set(self, key, val, **kwargs):
    return Client.set(self, self.uniqeKey + str(key), val, **kwargs)
class TestMemcache(unittest.TestCase):
    def setUp(self):
        # TODO(): unix socket server stuff
        servers = ["127.0.0.1:11211"]
        self.mc = Client(servers, debug=1)

    def tearDown(self):
        self.mc.disconnect_all()

    def check_setget(self, key, val, noreply=False):
        self.mc.set(key, val, noreply=noreply)
        newval = self.mc.get(key)
        self.assertEqual(newval, val)

    def test_setget(self):
        self.check_setget("a_string", "some random string")
        self.check_setget("a_string_2", "some random string", noreply=True)
        self.check_setget("an_integer", 42)
        self.check_setget("an_integer_2", 42, noreply=True)

    def test_delete(self):
        self.check_setget("long", int(1 << 30))
        result = self.mc.delete("long")
        self.assertEqual(result, True)
        self.assertEqual(self.mc.get("long"), None)

    def test_get_multi(self):
        self.check_setget("gm_a_string", "some random string")
        self.check_setget("gm_an_integer", 42)
        self.assertEqual(
            self.mc.get_multi(["gm_a_string", "gm_an_integer"]),
            {"gm_an_integer": 42, "gm_a_string": "some random string"})

    def test_get_unknown_value(self):
        self.mc.delete("unknown_value")
        self.assertEqual(self.mc.get("unknown_value"), None)

    def test_setget_foostruct(self):
        f = FooStruct()
        self.check_setget("foostruct", f)
        self.check_setget("foostruct_2", f, noreply=True)

    def test_incr(self):
        self.check_setget("i_an_integer", 42)
        self.assertEqual(self.mc.incr("i_an_integer", 1), 43)

    def test_incr_noreply(self):
        self.check_setget("i_an_integer_2", 42)
        self.assertEqual(self.mc.incr("i_an_integer_2", 1, noreply=True), None)
        self.assertEqual(self.mc.get("i_an_integer_2"), 43)

    def test_decr(self):
        self.check_setget("i_an_integer", 42)
        self.assertEqual(self.mc.decr("i_an_integer", 1), 41)

    def test_decr_noreply(self):
        self.check_setget("i_an_integer_2", 42)
        self.assertEqual(self.mc.decr("i_an_integer_2", 1, noreply=True), None)
        self.assertEqual(self.mc.get("i_an_integer_2"), 41)

    def test_sending_spaces(self):
        try:
            self.mc.set("this has spaces", 1)
        except Client.MemcachedKeyCharacterError as err:
            self.assertTrue("characters not allowed" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyCharacterError, nothing raised")

    def test_sending_control_characters(self):
        try:
            self.mc.set("this\x10has\x11control characters\x02", 1)
        except Client.MemcachedKeyCharacterError as err:
            self.assertTrue("characters not allowed" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyCharacterError, nothing raised")

    def test_sending_key_too_long(self):
        try:
            self.mc.set('a' * SERVER_MAX_KEY_LENGTH + 'a', 1)
        except Client.MemcachedKeyLengthError as err:
            self.assertTrue("length is >" in err.args[0])
        else:
            self.fail(
                "Expected Client.MemcachedKeyLengthError, nothing raised")
        # These should work.
        self.mc.set('a' * SERVER_MAX_KEY_LENGTH, 1)
        self.mc.set('a' * SERVER_MAX_KEY_LENGTH, 1, noreply=True)

    def test_setget_boolean(self):
        """GitHub issue #75. Set/get with boolean values."""
        self.check_setget("bool", True)

    def test_unicode_key(self):
        s = six.u('\u4f1a')
        maxlen = SERVER_MAX_KEY_LENGTH // len(s.encode('utf-8'))
        key = s * maxlen
        self.mc.set(key, 5)
        value = self.mc.get(key)
        self.assertEqual(value, 5)

    def test_ignore_too_large_value(self):
        # NOTE: "MemCached: while expecting[...]" is normal...
        key = 'keyhere'
        value = 'a' * (SERVER_MAX_VALUE_LENGTH // 2)
        self.assertTrue(self.mc.set(key, value))
        self.assertEqual(self.mc.get(key), value)

        value = 'a' * SERVER_MAX_VALUE_LENGTH
        self.assertFalse(self.mc.set(key, value))
        # This test fails if the -I option is used on the memcached server.
        self.assertTrue(self.mc.get(key) is None)

    def test_get_set_multi_key_prefix(self):
        """Testing set_multi() and get_multi() with a key prefix."""
        prefix = 'pfx_'
        values = {'key1': 'a', 'key2': 'b'}
        errors = self.mc.set_multi(values, key_prefix=prefix)
        self.assertEqual(errors, [])

        keys = list(values)
        self.assertEqual(self.mc.get_multi(keys, key_prefix=prefix), values)

    def test_set_multi_dead_servers(self):
        """Testing set_multi() with no memcacheds running."""
        self.mc.disconnect_all()
        for server in self.mc.servers:
            server.mark_dead('test')
        errors = self.mc.set_multi({'key1': 'a', 'key2': 'b'})
        self.assertEqual(sorted(errors), ['key1', 'key2'])

    def test_disconnect_all_delete_multi(self):
        """Testing delete_multi() with no memcacheds running."""
        self.mc.disconnect_all()
        ret = self.mc.delete_multi({'keyhere': 'a', 'keythere': 'b'})
        self.assertEqual(ret, 1)
def check_connect(self):
    try:
        self.connection.ping()
    except BaseException as ex:
        logging.error(ex)
        self.connection()


# DingTalk session management, backed by MySQL
# session_manager = MySQLSessionManager(host=DING_SESSION_HOST, port=DING_SESSION_PORT,
#                                       user=DING_SESSION_USER, pass_=DING_SESSION_PASS,
#                                       db=DING_SESSION_DB)

# DingTalk session management, backed by Memcached
from memcache import Client
session_manager = Client(current_config.CACHE_MEMCACHED_SERVERS)

# DingTalk session management, backed by Redis
# import redis
# session_manager = redis.Redis(host=current_config.CACHE_REDIS_SERVERS,
#                               port=current_config.CACHE_REDIS_PORT,
#                               db=current_config.CACHE_REDIS_DB)

# Here we pick the cache object configured in the settings file
# session_manager = current_config.DING_SESSION_MANAGER

# Instantiate a DingTalk object
dd_config = {
    'corp_id': CORP_ID,
    'corp_secret': CORP_SECRET,
    'agent_id': AGENT_ID,
from os.path import exists

from memcache import Client

from sekrit import MEMCACHE_SOCKET

if exists(MEMCACHE_SOCKET):
    CACHE = Client(['unix:' + MEMCACHE_SOCKET], debug=True)
else:
    CACHE = Client(['127.0.0.1:11213'], debug=True)

S = lambda key, value: CACHE.set(str(key), value)
G = lambda key: CACHE.get(str(key))


def store_dec(s):
    def inner(tag, url):
        s(tag, url)
        S(tag, url)
    return inner


def retrieve_dec(r):
    def inner(tag):
        url = G(tag)
        if url is None:
            url = r(tag)
            S(tag, url)
        return url
    return inner
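# Hedged usage sketch for the decorators above: wrap a slower backing store so
# every store also fills memcache and every retrieve is served from memcache
# when possible. The _db dict stands in for the real storage and is invented
# for illustration.
_db = {}

@store_dec
def store(tag, url):
    _db[tag] = url

@retrieve_dec
def retrieve(tag):
    return _db.get(tag)

store('gh', 'https://github.com')
print(retrieve('gh'))  # cache hit on subsequent calls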
class XmlSpider(Spider):
    name = 'run_delta'
    allowed_domains = []

    def read_seeds(self, file, type):
        seeds = []
        ok = 0
        error = 0
        try:
            for line in open(file, "r"):
                self.logger.info("[read_seeds] handle line:" + line)
                parts = line.split()
                if len(parts) != 2:
                    self.logger.error("[read_seeds] parts error line:%s" % line)
                    error = error + 1
                    continue
                url = parts[0].strip()
                cate = parts[1].strip()
                if cate not in ('1', '2', '3', '4', '5', '6', '7', '9'):
                    self.logger.error("[read_seeds] cate error line:%s" % line)
                    error = error + 1
                    continue
                ok = ok + 1
                seeds.append((url, cate, type))
        except:
            return False
        self.logger.info("[read_seeds] finish, ok:%d,error:%d,file:%s"
                         % (ok, error, file))
        return seeds

    def __init__(self, *args, **kargs):
        super(XmlSpider, self).__init__(*args, **kargs)
        xmlfile = kargs.get("xml", "")
        indexfile = kargs.get("index", "")
        self.nofilter = (kargs.get("nofilter", "") == "true")
        self.start_urls = []
        if not xmlfile and not indexfile:
            raise ValueError(
                "Seeds file error: Must specify a seeds file! Example: "
                "scrapy crawl **spider -a xml=seeds.txt -a index=seeds2.txt "
                "[-a nofilter=true or false]; at least one of xml and index "
                "is required\n\t\tfile format: url\tcate"
            )
        if xmlfile:
            seeds = self.read_seeds(xmlfile, "xml")
            if seeds == False:
                raise ValueError("read file error:" + xmlfile)
            self.start_urls.extend(seeds)
        if indexfile:
            seeds = self.read_seeds(indexfile, "index")
            if seeds == False:
                raise ValueError("read file error:" + indexfile)
            self.start_urls.extend(seeds)
        if not self.start_urls:
            raise ValueError("Empty seeds")
        self.logger.info("nofilter:%s,start urls:%s"
                         % (self.nofilter, self.start_urls))
        self.mc = Client(['127.0.0.1:11211'])

    def start_requests(self):
        print(self.start_urls)
        for (url, cate, type) in self.start_urls:
            yield Request(tools.add_timestamp(url),
                          meta={'cate': cate, 'type': type})

    def extract_text(self, item, field_name):
        tmp = item.xpath('%(field_name)s/text()' % locals())
        if tmp:
            value = tmp[0].extract().strip()
            return oneline(value)

    def extract_join_text(self, item, field_name):
        L = []
        for tmp in item.xpath('%(field_name)s/text()' % locals()).extract():
            tmp = tmp.strip()
            if tmp:
                L.append(tmp)
        if L:
            return oneline(u';'.join(L))

    def is_response_changed(self, response):
        # if 'mgtv' in response.url:
        #     return True
        # if tools.del_timestamp(response.url) == "http://www.mgtv.com/xml/sogou/pay/dianshiju_add.xml":
        #     return True
        if self.nofilter:
            self.logger.info(
                "[is_response_changed] nofilter = True, force response changed, url:%s"
                % (response.url))
            return True
        body_md5 = str2md5(response.body)
        # self.logger.info("[is_response_changed] url:%s, body: %s, new_size: %s" % (tools.del_timestamp(response.url), response.body, len(response.body)))
        self.logger.info(
            "[is_response_changed] url:%s, new_size: %s"
            % (tools.del_timestamp(response.url), len(response.body)))
        old_body_md5 = self.mc.get(tools.del_timestamp(response.url))
        if old_body_md5 == body_md5:
            self.logger.info(
                "[is_response_changed] url:%s,same md5:%s,size:%s"
                % (tools.del_timestamp(response.url), body_md5,
                   len(response.body)))
            return False
        # self.mc.set(tools.del_timestamp(response.url), body_md5)
        self.logger.info(
            "[is_response_changed] url:%s,diff md5, new:%s,old:%s, new_size:%s"
            % (tools.del_timestamp(response.url), body_md5,
               str(old_body_md5), len(response.body)))
        return True

    def parse(self, response):
        # print(type(response.body))
        t0 = time.time()
        urltype = response.request.meta["type"]
        if urltype == 'xml':
            # Need to judge whether the whole file changed.
            if not self.is_response_changed(response):
                dt = (time.time() - t0) * 1000
                self.logger.info('[parse] [%s] [not change] %s dt=%.3f ms'
                                 % (response.url, urltype, dt))
                return
        # Bring self.nofilter to the pipelines.
        handler = MyGeneralHandler(response.request.meta['cate'],
                                   response.url, self.nofilter)
        try:
            if response.encoding == 'utf-8':
                xml.sax.parseString(response.body, handler)
            else:
                xml.sax.parseString(
                    response.body.decode(response.encoding, 'ignore')
                    .encode("utf-8").replace("gbk", "utf-8")
                    .replace("gb18030", "utf-8"),
                    handler)
        except Exception as e:
            logging.error(
                "[PARSE EXCEPTION][sax first] url:|%s|,error:|%s|,encoding:%s,size:|||%s"
                % (response.url, e, response.encoding, len(response.body)))
            if 'mgtv' in response.url:
                try:
                    handler = MyGeneralHandler(response.request.meta['cate'],
                                               response.url, self.nofilter)
                    r = requests.get(response.url)
                    xml.sax.parseString(r.content, handler)
                except Exception as e:
                    logging.error(
                        "[PARSE EXCEPTION][sax second] url:|%s|,error:|%s|,encoding:%s,content:|||%s"
                        % (response.url, e, response.encoding, len(r.content)))
        dt = (time.time() - t0) * 1000
        if handler.items and not handler.items[0]:
            # The original format string had three placeholders but only two
            # arguments; urltype is the missing one.
            self.logger.info('[parse] [%s] type:%s,find NONE,%s'
                             % (response.url, urltype, str(handler.items)))
        self.logger.info(
            '[open_delta] [parse] [%s] type:%s,item_num:%s,cost:%d'
            % (response.url, urltype, len(handler.items), dt))
        return handler.items

    def parse_old(self, response):
        t0 = time.time()
        items = []
        # extract() returns unicode
        for loc in response.xpath('//sitemap/loc/text()').extract():
            loc = loc.strip()
            new_task = Request(loc)
            # self.logger.info('#new_task: ' + repr(new_task))
            items.append(new_task)
        if not self.is_response_changed(response):
            dt = (time.time() - t0) * 1000
            self.logger.info('#parse: [cached] %s %s dt=%.3f ms'
                             % (response.url, len(items), dt))
            return items
        for item in response.xpath('/urlset/url'):
            D_url = {}
            names = [
                'workName', 'director', 'region', 'type', 'showTime', 'hot',
                'score', 'language', 'introduction', 'definition',
                'totalnumber', 'seasonId', 'serialName', 'otherName',
                'source_type', 'status', 'nowEpisode'
            ]
            for name in names:
                v = self.extract_text(item, name)
                if v:
                    D_url[name] = v
            names_map = {
                'update_info': 'update',
                'site_updateTime': 'updateTime',
                'poster': 'imageLink',
                'setAddress': 'listLink',
                'horizontalPoster': 'imageLink2',
            }
            for name, field in names_map.items():
                v = self.extract_text(item, field)
                if v:
                    D_url[name] = v
            v = self.extract_text(item, 'introduction')
            if v:
                D_url['introduction'] = self.extract_text(item, 'introduction')
            v = self.extract_join_text(item, 'starrings/starring/name')
            if v:
                D_url['starring'] = v
            v = self.extract_join_text(item, 'starrings/starring/role')
            if v:
                D_url['role'] = v
            for detail in item.xpath('detail'):
                D_detail = D_url.copy()
                detail_names = [
                    'seq', 'singleTitle', 'singleLink', 'mplayLink',
                    'singleIntroduction', 'swfUrl', 'resourceTime',
                    'singleThumbnails', 'captionAvailable', 'vip'
                ]
                for name in detail_names:
                    v = self.extract_text(detail, name)
                    if v:
                        D_detail[name] = v
                v = self.extract_text(detail, 'length')
                if v:
                    D_detail['timeOfSingle'] = v
                v = self.extract_text(item, 'singleIntroduction')
                if v:
                    D_detail['singleIntroduction'] = v
                # url of the doc
                D_detail['__url__'] = D_detail['singleLink']
                D_detail['__collection__'] = 'teleplay_open'
                items.append(D_detail)
        dt = (time.time() - t0) * 1000
        self.logger.info('#parse: %s %s dt=%.3f ms'
                         % (response.url, len(items), dt))
        return items
from flask_mail import Mail
from flask_sqlalchemy import SQLAlchemy
from memcache import Client

client = Client(['127.0.0.1:11211'])
db = SQLAlchemy()
email = Mail()
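# These module-level singletons follow the usual Flask extension pattern:
# create unbound objects at import time, bind them in an app factory. A
# minimal sketch; the module name `extensions` and the config values here
# are assumptions:
from flask import Flask

from extensions import client, db, email

def create_app():
    app = Flask(__name__)
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
    db.init_app(app)
    email.init_app(app)
    # The memcache Client needs no init_app(); it is usable once imported.
    return app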
def get(self, key):
    return Client.get(self, self.uniqeKey + str(key))
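# The override above implies a Client subclass that namespaces every key
# with a per-instance prefix. A sketch of what the surrounding class might
# look like; the class name and the set() override are assumptions, and
# the uniqeKey attribute spelling is kept as in the excerpt:
from memcache import Client

class PrefixedClient(Client):
    def __init__(self, servers, prefix):
        Client.__init__(self, servers)
        self.uniqeKey = prefix

    def get(self, key):
        return Client.get(self, self.uniqeKey + str(key))

    def set(self, key, value, time=0):
        return Client.set(self, self.uniqeKey + str(key), value, time)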
def setUp(self):
    self.status = locals()
    self.address = ("127.0.0.1", 11213)
    self._start_stub_server()
    self.client = Client(["127.0.0.1:11213"], debug=1)
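# _start_stub_server is referenced but not shown. One plausible sketch,
# assuming only the standard library: a fake memcached that answers every
# request with END, which python-memcached reads as a cache miss. That is
# enough for connection and get() tests; set() would expect STORED.
import socket
import threading

def _start_stub_server(self):
    srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    srv.bind(self.address)
    srv.listen(1)

    def serve():
        while True:
            conn, _ = srv.accept()
            data = conn.recv(4096)
            while data:
                conn.sendall(b"END\r\n")
                data = conn.recv(4096)

    worker = threading.Thread(target=serve)
    worker.daemon = True
    worker.start()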
class PrintFavicon(BaseHandler):

    def __init__(self):
        super(PrintFavicon, self).__init__()

        default_icon_data = self.open(DEFAULT_FAVICON_LOC, time()).read()
        self.default_icon = Icon(data=default_icon_data,
                                 location=DEFAULT_FAVICON_LOC,
                                 type=DEFAULT_FAVICON_TYPE)

        self.env = Environment(loader=FileSystemLoader(
            os.path.join(cherrypy.config['favicon.root'], 'templates')))

        self.mc = Client(
            ['%(memcache.host)s:%(memcache.port)d' % cherrypy.config],
            debug=2)

        # Initialize counters
        for counter in ['requests', 'hits', 'defaults']:
            self.mc.add('counter-%s' % counter, '0')

    def open(self, url, start, headers=None):
        time_spent = int(time() - start)
        if time_spent >= TIMEOUT:
            raise TimeoutError(time_spent)

        if not headers:
            headers = dict()
        headers.update({
            'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; '
                          'rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13'
        })

        opener = build_opener(HTTPRedirectHandler(), HTTPCookieProcessor())
        return opener.open(Request(url, headers=headers),
                           timeout=min(CONNECTION_TIMEOUT,
                                       TIMEOUT - time_spent))

    def validateIconResponse(self, iconResponse):
        if iconResponse.getcode() != 200:
            cherrypy.log('Non-success response:%d fetching url:%s' %
                         (iconResponse.getcode(), iconResponse.geturl()),
                         severity=INFO)
            return None

        iconContentType = iconResponse.info().gettype()
        if iconContentType in ICON_MIMETYPE_BLACKLIST:
            cherrypy.log('Url:%s favicon content-type:%s blacklisted' %
                         (iconResponse.geturl(), iconContentType),
                         severity=INFO)
            return None

        icon = iconResponse.read()
        iconLength = len(icon)

        if iconLength == 0:
            cherrypy.log('Url:%s null content length' % iconResponse.geturl(),
                         severity=INFO)
            return None

        if iconLength < MIN_ICON_LENGTH or iconLength > MAX_ICON_LENGTH:
            # Issue a warning, but accept nonetheless!
            cherrypy.log('Warning: url:%s favicon size:%d out of bounds' %
                         (iconResponse.geturl(), iconLength),
                         severity=INFO)

        return Icon(data=icon, type=iconContentType)

    # Icon at [domain]/favicon.ico?
    def iconAtRoot(self, targetDomain, start):
        cherrypy.log('Attempting to locate favicon for domain:%s at root' %
                     targetDomain, severity=INFO)
        rootIconPath = targetDomain + '/favicon.ico'
        try:
            rootDomainFaviconResult = self.open(rootIconPath, start)
            rootIcon = self.validateIconResponse(rootDomainFaviconResult)
            if rootIcon:
                cherrypy.log('Found favicon for domain:%s at root' %
                             targetDomain, severity=INFO)
                self.cacheIcon(targetDomain, rootIcon.data, rootIconPath)
                rootIcon.location = rootIconPath
                return rootIcon
        except Exception:
            cherrypy.log('Error fetching favicon at domain root:%s, err:%s, msg:%s' %
                         (targetDomain, sys.exc_info()[0], sys.exc_info()[1]),
                         severity=INFO)

    # Icon specified in page?
    def iconInPage(self, targetDomain, targetPath, start, refresh=True):
        cherrypy.log('Attempting to locate embedded favicon link in page:%s' %
                     targetPath, severity=INFO)
        try:
            rootDomainPageResult = self.open(targetPath, start)
            if rootDomainPageResult.getcode() == 200:
                pageSoup = BeautifulSoup(rootDomainPageResult.read())
                pageSoupIcon = pageSoup.find(
                    'link',
                    rel=compile('^(shortcut|icon|shortcut icon)$', IGNORECASE))
                if pageSoupIcon:
                    pageIconHref = pageSoupIcon.get('href')
                    if pageIconHref:
                        pageIconPath = urljoin(targetPath, pageIconHref)
                        cherrypy.log('Found embedded favicon link:%s for domain:%s' %
                                     (pageIconPath, targetDomain),
                                     severity=INFO)

                        # Forward any cookies the page set when fetching
                        # the icon itself.
                        cookies = rootDomainPageResult.headers.getheaders('Set-Cookie')
                        headers = None
                        if cookies:
                            headers = {'Cookie': ';'.join(cookies)}

                        pagePathFaviconResult = self.open(pageIconPath, start,
                                                          headers=headers)
                        pageIcon = self.validateIconResponse(pagePathFaviconResult)
                        if pageIcon:
                            cherrypy.log('Found favicon at:%s for domain:%s' %
                                         (pageIconPath, targetDomain),
                                         severity=INFO)
                            self.cacheIcon(targetDomain, pageIcon.data,
                                           pageIconPath)
                            pageIcon.location = pageIconPath
                            return pageIcon
                else:
                    if refresh:
                        # Follow a single <meta http-equiv="refresh"> hop.
                        for meta in pageSoup.findAll('meta'):
                            if meta.get('http-equiv', '').lower() == 'refresh':
                                match = search('url=([^;]+)',
                                               meta.get('content', ''),
                                               flags=IGNORECASE)
                                if match:
                                    refreshPath = urljoin(
                                        rootDomainPageResult.geturl(),
                                        match.group(1))
                                    cherrypy.log('Processing refresh directive:%s for domain:%s' %
                                                 (refreshPath, targetDomain),
                                                 severity=INFO)
                                    return self.iconInPage(targetDomain,
                                                           refreshPath,
                                                           start,
                                                           refresh=False)
                    cherrypy.log('No link tag found:%s' % targetPath,
                                 severity=INFO)
            else:
                cherrypy.log('Non-success response:%d for url:%s' %
                             (rootDomainPageResult.getcode(), targetPath),
                             severity=INFO)
        except Exception:
            cherrypy.log('Error extracting favicon from page:%s, err:%s, msg:%s' %
                         (targetPath, sys.exc_info()[0], sys.exc_info()[1]),
                         severity=WARNING)

    def cacheIcon(self, domain, icon, loc):
        cherrypy.log('Caching icon at location:%s for domain:%s' % (loc, domain),
                     severity=INFO)
        if not self.mc.set('icon-%s' % domain, icon, time=MC_CACHE_TIME):
            cherrypy.log('Could not cache icon for domain:%s' % domain,
                         severity=ERROR)

    def iconInCache(self, targetDomain, start):
        icon = self.mc.get('icon-%s' % targetDomain)
        if icon:
            self.mc.incr('counter-hits')
            cherrypy.log('Cache hit:%s' % targetDomain, severity=INFO)
            cherrypy.response.headers['X-Cache'] = 'Hit'
            if icon == 'DEFAULT':
                self.mc.incr('counter-defaults')
                return self.default_icon
            else:
                return Icon(data=icon)

    def writeIcon(self, icon):
        self.writeHeaders(icon)
        return icon.data

    def writeHeaders(self, icon, fmt='%a, %d %b %Y %H:%M:%S %z'):
        # MIME type
        cherrypy.response.headers['Content-Type'] = icon.type or 'image/x-icon'
        # Caching headers
        cherrypy.response.headers['Cache-Control'] = 'public, max-age=2592000'
        cherrypy.response.headers['Expires'] = \
            (datetime.now() + timedelta(days=30)).strftime(fmt)

    def parse(self, url):
        # Get page path
        targetPath = self.urldecode(url)
        if not targetPath.startswith('http'):
            targetPath = 'http://%s' % targetPath
        cherrypy.log('Decoded URL:%s' % targetPath, severity=INFO)

        # Split path to get domain
        targetURL = urlparse(targetPath)
        if not targetURL or not targetURL.scheme or not targetURL.netloc:
            raise cherrypy.HTTPError(400, 'Malformed URL:%s' % url)
        targetDomain = '%s://%s' % (targetURL.scheme, targetURL.netloc)
        cherrypy.log('URL:%s, domain:%s' % (targetPath, targetDomain),
                     severity=INFO)
        return (targetPath, targetDomain)

    @cherrypy.expose
    def index(self):
        status = {'status': 'ok', 'counters': dict()}
        for counter in ['requests', 'hits', 'defaults']:
            status['counters'][counter] = self.mc.get('counter-%s' % counter)
        return json.dumps(status)

    @cherrypy.expose
    def test(self):
        topSites = open(
            os.path.join(cherrypy.config['favicon.root'], 'topsites.txt'),
            'r').read().split()
        template = self.env.get_template('test.html')
        return template.render(topSites=topSites)

    @cherrypy.expose
    def clear(self, url):
        cherrypy.log('Incoming cache invalidation request:%s' % url,
                     severity=INFO)
        targetPath, targetDomain = self.parse(str(url))
        # Evict the same key cacheIcon() writes; the original deleted
        # 'icon_loc-%s', a key nothing in this handler ever sets.
        self.mc.delete('icon-%s' % targetDomain)
        cherrypy.log('Evicted cache entry for %s' % targetDomain,
                     severity=INFO)

    @cherrypy.expose
    def s(self, url, skipCache='false'):
        start = time()
        skipCache = (skipCache.lower() == 'true')
        cherrypy.log('Incoming request:%s (skipCache=%s)' % (url, skipCache),
                     severity=INFO)

        self.mc.incr('counter-requests')
        targetPath, targetDomain = self.parse(str(url))

        icon = (not skipCache and self.iconInCache(targetDomain, start)) or \
               self.iconInPage(targetDomain, targetPath, start) or \
               self.iconAtRoot(targetDomain, start)

        if not icon:
            cherrypy.log('Falling back to default icon for:%s' % targetDomain,
                         severity=INFO)
            self.cacheIcon(targetDomain, 'DEFAULT', 'DEFAULT_LOC')
            self.mc.incr('counter-defaults')
            icon = self.default_icon

        cherrypy.log('Time taken to process domain:%s %f' %
                     (targetDomain, time() - start), severity=INFO)
        return self.writeIcon(icon)
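# A sketch of wiring the handler into a CherryPy process. The config
# values are assumptions; the key names (favicon.root, memcache.host,
# memcache.port) are the ones the class reads above:
import cherrypy

cherrypy.config.update({
    'favicon.root': '/srv/favicon',
    'memcache.host': '127.0.0.1',
    'memcache.port': 11211,   # must be an int: the class formats it with %d
})
cherrypy.quickstart(PrintFavicon(), '/')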
def setUp(self):
    # TODO(): unix socket server stuff
    servers = ["127.0.0.1:11211"]
    self.mc = Client(servers, debug=1)
class TestCacheDecoratorWithMemcache(unittest.TestCase):

    def setUp(self):
        servers = ["127.0.0.1:11211"]
        self.cache = Client(servers)
        self.user_id = 5

    def test_cache_is_empty(self):
        value = self.cache.get(str(self.user_id))
        self.assertTrue(value is None)

    def test_get_long_response_set_value_to_cache(self):
        cached, result = get_long_response(self.user_id)
        self.assertTrue(cached is False)
        self.assertTrue(result == self.cache.get(str(self.user_id)))

    def test_get_long_response_gets_value_from_cache(self):
        cached_first_call, result_first_call = get_long_response(self.user_id)
        cached_second_call, result_second_call = get_long_response(self.user_id)
        self.assertTrue(cached_second_call is True)
        self.assertTrue(result_second_call == result_first_call)

    def test_get_long_response_uses_user_id_as_unique_key(self):
        another_user_id = 7
        cached_user1_1, result_user1_1 = get_long_response(self.user_id)
        cached_user1_2, result_user1_2 = get_long_response(self.user_id)
        cached_user2_1, result_user2_1 = get_long_response(another_user_id)
        cached_user2_2, result_user2_2 = get_long_response(another_user_id)
        self.assertTrue(cached_user1_2 is True)
        self.assertTrue(cached_user2_2 is True)
        self.assertTrue(result_user1_1 == result_user1_2)
        self.assertTrue(result_user2_1 == result_user2_2)
        self.assertFalse(result_user2_2 == result_user1_2)
        # Remove the second user's entry; tearDown only cleans up self.user_id.
        self.cache.delete(str(another_user_id))

    def test_delete_entry_in_cache_from_outside(self):
        cached, result = get_long_response(self.user_id)
        self.assertTrue(cached is False)
        value_in_cache = self.cache.get(str(self.user_id))
        self.assertTrue(result == value_in_cache)
        self.cache.delete(str(self.user_id))
        self.assertTrue(self.cache.get(str(self.user_id)) is None)

    def tearDown(self):
        self.cache.delete(str(self.user_id))
        self.cache.disconnect_all()
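# The tests exercise a get_long_response function that is not shown here.
# A minimal sketch of a caching decorator that would satisfy them; the
# decorator name and the slow-work body are assumptions:
import time
from functools import wraps

from memcache import Client

cache = Client(["127.0.0.1:11211"])

def memoize_by_first_arg(func):
    """Cache the result under str(first argument); report hit vs. miss."""
    @wraps(func)
    def wrapper(key, *args, **kwargs):
        value = cache.get(str(key))
        if value is not None:
            return True, value
        value = func(key, *args, **kwargs)
        cache.set(str(key), value)
        return False, value
    return wrapper

@memoize_by_first_arg
def get_long_response(user_id):
    time.sleep(1)  # stand-in for slow work
    return 'response-for-%s' % user_id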
from memcache import Client

"""
Problem hit while installing memcached: it complains about libevent.
Fix:
    yum install libevent
    yum install libevent-devel
"""

# This is a list, so a memcached cluster can be addressed the same way.
MC_SERVERS = ['192.168.52.3:11211', '192.168.52.3:11212']
CONN = Client(MC_SERVERS)

status = CONN.set('key1', 'val2', 0)
print(status)
status = CONN.delete('key')
print(status)
status = CONN.add('key', 'val', 20)
print(status)
status = CONN.replace('key', 'val1', 0)
print(status)
status = CONN.append('key', ',val2')
print(status)
data = CONN.get('key')
print(data)
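# python-memcached also exposes memcached's check-and-set flow. A short
# sketch continuing from MC_SERVERS above; note that gets()/cas() only
# track CAS tokens when the client is created with cache_cas=True.
CAS_CONN = Client(MC_SERVERS, cache_cas=True)

CAS_CONN.set('counter', 'a', 0)
value = CAS_CONN.gets('counter')  # records a CAS token for 'counter'
if value is not None:
    # Returns a falsy value if another client changed 'counter'
    # between the gets() and the cas().
    status = CAS_CONN.cas('counter', value + ',b', 0)
    print(status)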
def __init__(self, address):
    from memcache import Client
    self.conn = Client([address])
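# A constructor like this usually anchors a thin wrapper around the client.
# A sketch of one plausible enclosing class; the name CacheBackend and the
# get/set delegates are assumptions, not original code:
class CacheBackend(object):
    def __init__(self, address):
        from memcache import Client
        self.conn = Client([address])

    def get(self, key):
        return self.conn.get(key)

    def set(self, key, value, ttl=0):
        return self.conn.set(key, value, ttl)

backend = CacheBackend('127.0.0.1:11211')
backend.set('greeting', 'hello')
print(backend.get('greeting'))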
# Settings
#button = 18                    # GPIO pin with button connected
button = port.PA20              # GPIO pin with button connected
#plb_light = 24                 # GPIO pin for the playback/activity light
plb_light = port.PA9            # GPIO pin for the playback/activity light
#rec_light = 25                 # GPIO pin for the recording light
rec_light = port.PA8            # GPIO pin for the recording light
lights = [plb_light, rec_light] # GPIO pins with LEDs connected
#device = "plughw:1"            # Name of your microphone/sound card in arecord -L
device = "plughw:audiocodec"    # Name of your microphone/sound card in arecord -L
playlists = set(['pls', 'm3u', 'ash'])

# Setup
recorded = False
servers = ["127.0.0.1:11211"]
mc = Client(servers, debug=1)
# dirname() is the idiomatic way to get the script directory; the original
# rstrip(basename) strips a character set, not a suffix.
path = os.path.dirname(os.path.realpath(__file__)) + os.sep

# Variables
p = ""
nav_token = ""
streamurl = ""
streamid = ""
position = 0
audioplaying = False

# Debug
debug = 0

class bcolors:
class MemcachedCacheClient(CacheClient):
    """Memcached cache client implementation."""

    def __init__(self, config):
        super(MemcachedCacheClient, self).__init__(config["host"],
                                                   config["port"],
                                                   config["cache"])
        self.config = config
        if self.cache_name != DEFAULT_CACHE_NAME:
            print("WARNING: memcached client doesn't support named caches. "
                  "The cache_name config value will be ignored and the "
                  "default cache will be used instead.")
        self.memcached_client = Client([self.host + ':' + self.port], debug=0)

    def put(self, key, value, version=None, lifespan=None, max_idle=None,
            put_if_absent=False):
        time = 0
        if lifespan is not None:
            if lifespan > MEMCACHED_LIFESPAN_MAX_SECONDS:
                self._error("Memcached cache client supports lifespan values "
                            "only up to %s seconds (30 days)."
                            % MEMCACHED_LIFESPAN_MAX_SECONDS)
            time = lifespan
        if max_idle is not None:
            self._error("Memcached cache client doesn't support max idle time setting.")
        try:
            if version is None:
                if put_if_absent:
                    if not self.memcached_client.add(key, value, time, 0):
                        # current python-memcached doesn't recognize these states
                        # if self.memcached_client.last_set_status == "NOT_STORED":
                        #     raise ConflictError
                        # else:
                        #     self._error("Operation unsuccessful. " + self.memcached_client.last_set_status)
                        self._error("Operation unsuccessful. Possibly CONFLICT.")
                else:
                    if not self.memcached_client.set(key, value, time, 0):
                        # self._error("Operation unsuccessful. " + self.memcached_client.last_set_status)
                        self._error("Operation unsuccessful.")
            else:
                try:
                    self.memcached_client.cas_ids[key] = int(version)
                except ValueError:
                    self._error("Please provide an integer version.")
                if not self.memcached_client.cas(key, value, time, 0):
                    # if self.memcached_client.last_set_status == "EXISTS":
                    #     raise ConflictError
                    # if self.memcached_client.last_set_status == "NOT_FOUND":
                    #     raise NotFoundError
                    # else:
                    #     self._error("Operation unsuccessful. " + self.memcached_client.last_set_status)
                    self._error("Operation unsuccessful. Possibly CONFLICT or NOT_FOUND.")
        except CacheClientError as e:
            raise e  # rethrow
        except Exception as e:
            self._error(e)

    def get(self, key, get_version=False):
        try:
            if get_version:
                val = self.memcached_client.gets(key)
                if val is None:
                    raise NotFoundError
                version = self.memcached_client.cas_ids[key]
                if version is None:
                    self._error("Couldn't obtain version info from memcached server.")
                return version, val
            else:
                val = self.memcached_client.get(key)
                if val is None:
                    raise NotFoundError
                return val
        except CacheClientError as e:
            raise e  # rethrow
        except Exception as e:
            self._error(e.args)

    def delete(self, key, version=None):
        try:
            if version:
                self._error("versioned delete operation not available for memcached client")
            # delete() returns a falsy value on failure. The original checked
            # the (nonexistent) last_set_status attribute on success, so it
            # could never actually report NOT_FOUND.
            if not self.memcached_client.delete(key, 0):
                raise NotFoundError
        except CacheClientError as e:
            raise e  # rethrow
        except Exception as e:
            self._error(e.args)

    def clear(self):
        try:
            self.memcached_client.flush_all()
        except CacheClientError as e:
            raise e  # rethrow
        except Exception as e:
            self._error(e.args)
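# A short usage sketch of the client above, assuming a config dict with
# the keys the constructor reads (host, port as a string, cache):
config = {'host': '127.0.0.1', 'port': '11211', 'cache': DEFAULT_CACHE_NAME}
cache = MemcachedCacheClient(config)

cache.put('greeting', 'hello', lifespan=60)
print(cache.get('greeting'))

version, value = cache.get('greeting', get_version=True)
cache.put('greeting', 'hello again', version=version)  # versioned write via CAS

cache.delete('greeting')
try:
    cache.get('greeting')
except NotFoundError:
    print('greeting was deleted')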