class MemcacheHashCli(object):
    """Thin best-effort wrapper around pymemcache's ``HashClient``.

    Every operation swallows all client/network errors and returns a
    sentinel instead (``False`` for writes, ``None`` for reads), so callers
    can treat the cache as strictly optional.
    """

    def __init__(self, hosts):
        """
        :param hosts: Memcached node list passed straight to ``HashClient``.
        """
        self.client = HashClient(hosts)

    def set(self, key, value, expire):
        """Store ``value`` under ``key``; returns False on any error."""
        try:
            return self.client.set(key, value, expire)
        except Exception:
            # Best-effort cache: never propagate memcached failures.
            return False

    def get(self, key):
        """Fetch ``key``; returns None on a miss or any error."""
        try:
            return self.client.get(key, default=None)
        except Exception:
            return None

    def mset(self, values, expire):
        """Store a dict of key/value pairs; returns False on any error."""
        try:
            return self.client.set_many(values, expire)
        except Exception:
            return False

    def mget(self, keys):
        """Fetch many keys at once; returns None on any error."""
        try:
            return self.client.get_many(keys)
        except Exception:
            return None
def test_no_servers_left_with_get_many(self):
    """With an empty server ring and ignore_exc, get_many yields False per key."""
    from pymemcache.client.hash import HashClient

    empty_ring = HashClient(
        [],
        use_pooling=True,
        ignore_exc=True,
        timeout=1,
        connect_timeout=1,
    )

    fetched = empty_ring.get_many(['foo', 'bar'])

    expected = {key: False for key in ('foo', 'bar')}
    assert fetched == expected
class MemcacheService(apiproxy_stub.APIProxyStub):
  """Python only memcache service.

  This service keeps all data in any external servers running memcached.

  Routes GAE memcache API protocol-buffer requests (Get/Set/Delete/
  Increment/FlushAll/Stats) to a pymemcache HashClient ring.
  """
  # The memcached default port.
  MEMCACHE_PORT = 11211

  # An AppScale file which has a list of IPs running memcached.
  APPSCALE_MEMCACHE_FILE = "/etc/appscale/memcache_ips"

  def __init__(self, project_id, service_name='memcache'):
    """Initializer.

    Args:
      project_id: The project ID used to namespace all cache keys.
      service_name: Service name expected for all calls.
    """
    super(MemcacheService, self).__init__(service_name)
    self._memcache = None
    self.setupMemcacheClient()
    # Dispatch table mapping the request's set_policy enum to the matching
    # pymemcache mutation method.
    self._methods = {
        MemcacheSetRequest.SET: self._memcache.set,
        MemcacheSetRequest.ADD: self._memcache.add,
        MemcacheSetRequest.REPLACE: self._memcache.replace,
        MemcacheSetRequest.CAS: self._memcache.cas}
    self._project_id = project_id

  def setupMemcacheClient(self):
    """ Sets up the memcache client.

    Reads the memcached node list from APPSCALE_MEMCACHE_FILE when present,
    otherwise falls back to localhost, and builds a pooled HashClient.
    """
    if os.path.exists(self.APPSCALE_MEMCACHE_FILE):
      memcache_file = open(self.APPSCALE_MEMCACHE_FILE, "r")
      all_ips = memcache_file.read().split("\n")
      memcache_file.close()
    else:
      all_ips = ['localhost']

    # Drop blank lines from the file and sort — presumably so every process
    # builds an identical hash ring regardless of file order; confirm.
    memcaches = [(ip, self.MEMCACHE_PORT) for ip in all_ips if ip]
    memcaches.sort()

    self._memcache = HashClient(
        memcaches, serializer=serializer, deserializer=deserializer,
        connect_timeout=5, timeout=1, use_pooling=True)
    # The GAE API expects return values for all mutate operations.
    for client in six.itervalues(self._memcache.clients):
      client.default_noreply = False

  def _Dynamic_Get(self, request, response):
    """Implementation of gets for memcache.

    Args:
      request: A MemcacheGetRequest protocol buffer.
      response: A MemcacheGetResponse protocol buffer.
    """
    # Remove duplicate keys.  Maps encoded key -> original key so the
    # response can echo the caller's key back.
    original_keys = {
        encode_key(self._project_id, request.name_space(), key): key
        for key in request.key_list()}
    try:
      backend_response = self._memcache.get_many(
          original_keys.keys(), gets=request.for_cas())
    except MemcacheClientError as error:
      raise apiproxy_errors.ApplicationError(INVALID_VALUE, str(error))
    except TRANSIENT_ERRORS as error:
      raise apiproxy_errors.ApplicationError(
          UNSPECIFIED_ERROR, 'Transient memcache error: {}'.format(error))

    for encoded_key, value_tuple in six.iteritems(backend_response):
      item = response.add_item()
      item.set_key(original_keys[encoded_key])
      if request.for_cas():
        # With gets=True pymemcache returns (stored_value, cas_token);
        # unwrap to the stored value after recording the token.
        item.set_cas_id(int(value_tuple[1]))
        value_tuple = value_tuple[0]

      # Stored values are (value, flags) pairs produced by _Dynamic_Set.
      item.set_value(value_tuple[0])
      item.set_flags(value_tuple[1])

  def _Dynamic_Set(self, request, response):
    """Implementation of sets for memcache.

    Args:
      request: A MemcacheSetRequest.
      response: A MemcacheSetResponse.
    """
    namespace = request.name_space()
    if any(item.set_policy() not in self._methods
           for item in request.item_list()):
      raise apiproxy_errors.ApplicationError(INVALID_VALUE,
                                             'Unsupported set_policy')

    if not all(item.has_cas_id() for item in request.item_list()
               if item.set_policy() == MemcacheSetRequest.CAS):
      raise apiproxy_errors.ApplicationError(
          INVALID_VALUE, 'All CAS items must have a cas_id')

    for item in request.item_list():
      try:
        encoded_key = encode_key(self._project_id, namespace, item.key())
      except apiproxy_errors.ApplicationError:
        # An unencodable key marks only this item as failed; the batch
        # continues.
        response.add_set_status(MemcacheSetResponse.ERROR)
        continue

      args = {'key': encoded_key,
              'value': (item.value(), item.flags()),
              'expire': int(item.expiration_time())}
      is_cas = item.set_policy() == MemcacheSetRequest.CAS
      if is_cas:
        # NOTE(review): six.binary_type(int) is str(int) on Python 2 but
        # produces a zero-filled bytes object on Python 3 — confirm this
        # stub only runs under Python 2.
        args['cas'] = six.binary_type(item.cas_id())

      try:
        backend_response = self._methods[item.set_policy()](**args)
      except (TRANSIENT_ERRORS + (MemcacheClientError,)):
        response.add_set_status(MemcacheSetResponse.ERROR)
        continue

      if backend_response:
        response.add_set_status(MemcacheSetResponse.STORED)
        continue

      # NOTE(review): presumably pymemcache's cas returns False when the
      # token did not match (EXISTS) and None when the key is gone — verify.
      if is_cas and backend_response is False:
        response.add_set_status(MemcacheSetResponse.EXISTS)
        continue

      response.add_set_status(MemcacheSetResponse.NOT_STORED)

  def _Dynamic_Delete(self, request, response):
    """Implementation of delete in memcache.

    Args:
      request: A MemcacheDeleteRequest protocol buffer.
      response: A MemcacheDeleteResponse protocol buffer.
    """
    for item in request.item_list():
      encoded_key = encode_key(self._project_id, request.name_space(),
                               item.key())
      try:
        key_existed = self._memcache.delete(encoded_key)
      except MemcacheClientError as error:
        raise apiproxy_errors.ApplicationError(INVALID_VALUE, str(error))
      except TRANSIENT_ERRORS as error:
        raise apiproxy_errors.ApplicationError(
            UNSPECIFIED_ERROR, 'Transient memcache error: {}'.format(error))

      # default_noreply is disabled in setupMemcacheClient, so delete
      # reports whether the key existed.
      response.add_delete_status(
          MemcacheDeleteResponse.DELETED if key_existed
          else MemcacheDeleteResponse.NOT_FOUND)

  def _Increment(self, namespace, request):
    """Internal function for incrementing from a MemcacheIncrementRequest.

    Args:
      namespace: A string containing the namespace for the request, if any.
        Pass an empty string if there is no namespace.
      request: A MemcacheIncrementRequest instance.

    Returns:
      An integer indicating the new value.

    Raises:
      ApplicationError if unable to perform the mutation.
    """
    encoded_key = encode_key(self._project_id, namespace, request.key())
    method = self._memcache.incr
    if request.direction() == MemcacheIncrementRequest.DECREMENT:
      method = self._memcache.decr

    try:
      response = method(encoded_key, request.delta())
    except MemcacheClientError as error:
      raise apiproxy_errors.ApplicationError(INVALID_VALUE, str(error))
    except TRANSIENT_ERRORS as error:
      raise apiproxy_errors.ApplicationError(
          UNSPECIFIED_ERROR, 'Transient memcache error: {}'.format(error))

    # A None response means the key was absent.
    if response is None and not request.has_initial_value():
      raise apiproxy_errors.ApplicationError(UNSPECIFIED_ERROR,
                                             'Key does not exist')

    if response is not None:
      return response

    # If the key was not present and an initial value was provided, perform
    # the mutation client-side and set the key if it still doesn't exist.
    flags = 0
    if request.has_initial_flags():
      flags = request.initial_flags()

    if request.direction() == MemcacheIncrementRequest.INCREMENT:
      updated_val = request.initial_value() + request.delta()
    else:
      updated_val = request.initial_value() - request.delta()

    # Clamp at zero and wrap at the memcached 64-bit counter boundary.
    updated_val = max(updated_val, 0) % (MAX_INCR + 1)
    try:
      # NOTE(review): six.binary_type(int) — see the same concern in
      # _Dynamic_Set about Python 3 semantics.
      response = self._memcache.add(
          encoded_key, (six.binary_type(updated_val), flags))
    except (TRANSIENT_ERRORS + (MemcacheClientError,)):
      raise apiproxy_errors.ApplicationError(
          UNSPECIFIED_ERROR, 'Unable to set initial value')

    # add returns False when another writer created the key first.
    if response is False:
      raise apiproxy_errors.ApplicationError(
          UNSPECIFIED_ERROR, 'Unable to set initial value')

    return updated_val

  def _Dynamic_Increment(self, request, response):
    """Implementation of increment for memcache.

    Args:
      request: A MemcacheIncrementRequest protocol buffer.
      response: A MemcacheIncrementResponse protocol buffer.
    """
    new_value = self._Increment(request.name_space(), request)
    response.set_new_value(new_value)

  def _Dynamic_BatchIncrement(self, request, response):
    """Implementation of batch increment for memcache.

    Args:
      request: A MemcacheBatchIncrementRequest protocol buffer.
      response: A MemcacheBatchIncrementResponse protocol buffer.
    """
    for request_item in request.item_list():
      item = response.add_item()
      try:
        new_value = self._Increment(request.name_space(), request_item)
      except apiproxy_errors.ApplicationError as error:
        # Invalid values leave the counter untouched; anything else is an
        # operational error.
        if error.application_error == INVALID_VALUE:
          item.set_increment_status(MemcacheIncrementResponse.NOT_CHANGED)
        else:
          item.set_increment_status(MemcacheIncrementResponse.ERROR)

        continue

      item.set_increment_status(MemcacheIncrementResponse.OK)
      item.set_new_value(new_value)

  def _Dynamic_FlushAll(self, request, response):
    """Implementation of MemcacheService::FlushAll().

    Args:
      request: A MemcacheFlushRequest.
      response: A MemcacheFlushResponse.
    """
    # TODO: Prevent a project from clearing another project's namespace.
    self._memcache.flush_all()

  def _Dynamic_Stats(self, request, response):
    """Implementation of MemcacheService::Stats().

    Args:
      request: A MemcacheStatsRequest.
      response: A MemcacheStatsResponse.
    """
    # TODO: Gather stats for a project rather than the deployment.
    hits = 0
    misses = 0
    byte_hits = 0
    items = 0
    byte_count = 0
    oldest_item_age = 0
    for server in six.itervalues(self._memcache.clients):
      server_stats = server.stats()
      hits += server_stats.get('get_hits', 0)
      misses += server_stats.get('get_misses', 0)
      byte_hits += server_stats.get('bytes_read', 0)
      items += server_stats.get('curr_items', 0)
      byte_count += server_stats.get('bytes', 0)

      # Using the "age" field may not be correct here. The GAE docs claim this
      # should specify "how long in seconds since the oldest item in the cache
      # was accessed" rather than when it was created.
      # NOTE(review): max() raises ValueError when a server has no
      # ':age' entries (empty cache) — confirm whether that can happen here.
      item_stats = server.stats('items')
      oldest_server_item = max(age for key, age in six.iteritems(item_stats)
                               if key.endswith(':age'))
      oldest_item_age = max(oldest_item_age, oldest_server_item)

    stats = response.mutable_stats()
    stats.set_hits(hits)
    stats.set_misses(misses)
    stats.set_byte_hits(byte_hits)
    stats.set_items(items)
    stats.set_bytes(byte_count)
    stats.set_oldest_item_age(oldest_item_age)
class MemcacheService(apiproxy_stub.APIProxyStub):
  """Python only memcache service.

  This service keeps all data in any external servers running memcached.

  Translates GAE memcache API protocol-buffer calls into operations on a
  pymemcache HashClient ring.
  """
  # The memcached default port.
  MEMCACHE_PORT = 11211

  # An AppScale file which has a list of IPs running memcached.
  APPSCALE_MEMCACHE_FILE = "/etc/appscale/memcache_ips"

  def __init__(self, project_id, service_name='memcache'):
    """Initializer.

    Args:
      project_id: The project ID used to namespace all cache keys.
      service_name: Service name expected for all calls.
    """
    super(MemcacheService, self).__init__(service_name)
    self._memcache = None
    self.setupMemcacheClient()
    # set_policy enum -> pymemcache mutation method dispatch table.
    self._methods = {MemcacheSetRequest.SET: self._memcache.set,
                     MemcacheSetRequest.ADD: self._memcache.add,
                     MemcacheSetRequest.REPLACE: self._memcache.replace,
                     MemcacheSetRequest.CAS: self._memcache.cas}
    self._project_id = project_id

  def setupMemcacheClient(self):
    """ Sets up the memcache client.

    Node list comes from APPSCALE_MEMCACHE_FILE when present, otherwise
    localhost; builds a pooled HashClient over those nodes.
    """
    if os.path.exists(self.APPSCALE_MEMCACHE_FILE):
      memcache_file = open(self.APPSCALE_MEMCACHE_FILE, "r")
      all_ips = memcache_file.read().split("\n")
      memcache_file.close()
    else:
      all_ips = ['localhost']

    # Filter blanks and sort — presumably for a deterministic hash ring
    # across processes; confirm.
    memcaches = [(ip, self.MEMCACHE_PORT) for ip in all_ips if ip]
    memcaches.sort()

    self._memcache = HashClient(
        memcaches, serializer=serializer, deserializer=deserializer,
        connect_timeout=5, timeout=1, use_pooling=True)
    # The GAE API expects return values for all mutate operations.
    for client in six.itervalues(self._memcache.clients):
      client.default_noreply = False

  def _Dynamic_Get(self, request, response):
    """Implementation of gets for memcache.

    Args:
      request: A MemcacheGetRequest protocol buffer.
      response: A MemcacheGetResponse protocol buffer.
    """
    # Remove duplicate keys.  Also maps the encoded key back to the
    # caller-supplied key for the response.
    original_keys = {
        encode_key(self._project_id, request.name_space(), key): key
        for key in request.key_list()}
    try:
      backend_response = self._memcache.get_many(
          original_keys.keys(), gets=request.for_cas())
    except MemcacheClientError as error:
      raise apiproxy_errors.ApplicationError(INVALID_VALUE, str(error))
    except TRANSIENT_ERRORS as error:
      raise apiproxy_errors.ApplicationError(
          UNSPECIFIED_ERROR, 'Transient memcache error: {}'.format(error))

    for encoded_key, value_tuple in six.iteritems(backend_response):
      item = response.add_item()
      item.set_key(original_keys[encoded_key])
      if request.for_cas():
        # gets=True wraps each entry as (stored_value, cas_token).
        item.set_cas_id(int(value_tuple[1]))
        value_tuple = value_tuple[0]

      # Stored values are (value, flags) pairs written by _Dynamic_Set.
      item.set_value(value_tuple[0])
      item.set_flags(value_tuple[1])

  def _Dynamic_Set(self, request, response):
    """Implementation of sets for memcache.

    Args:
      request: A MemcacheSetRequest.
      response: A MemcacheSetResponse.
    """
    namespace = request.name_space()
    if any(item.set_policy() not in self._methods
           for item in request.item_list()):
      raise apiproxy_errors.ApplicationError(
          INVALID_VALUE, 'Unsupported set_policy')

    if not all(item.has_cas_id() for item in request.item_list()
               if item.set_policy() == MemcacheSetRequest.CAS):
      raise apiproxy_errors.ApplicationError(
          INVALID_VALUE, 'All CAS items must have a cas_id')

    for item in request.item_list():
      try:
        encoded_key = encode_key(self._project_id, namespace, item.key())
      except apiproxy_errors.ApplicationError:
        # Only this item fails; the rest of the batch proceeds.
        response.add_set_status(MemcacheSetResponse.ERROR)
        continue

      args = {'key': encoded_key,
              'value': (item.value(), item.flags()),
              'expire': int(item.expiration_time())}
      is_cas = item.set_policy() == MemcacheSetRequest.CAS
      if is_cas:
        # NOTE(review): six.binary_type(int) is str(int) on Python 2 but a
        # zero-filled bytes object on Python 3 — confirm Python 2 only.
        args['cas'] = six.binary_type(item.cas_id())

      try:
        backend_response = self._methods[item.set_policy()](**args)
      except (TRANSIENT_ERRORS + (MemcacheClientError,)):
        response.add_set_status(MemcacheSetResponse.ERROR)
        continue

      if backend_response:
        response.add_set_status(MemcacheSetResponse.STORED)
        continue

      # NOTE(review): presumably cas returns False for a token mismatch
      # (EXISTS) and None for a missing key — verify against pymemcache.
      if is_cas and backend_response is False:
        response.add_set_status(MemcacheSetResponse.EXISTS)
        continue

      response.add_set_status(MemcacheSetResponse.NOT_STORED)

  def _Dynamic_Delete(self, request, response):
    """Implementation of delete in memcache.

    Args:
      request: A MemcacheDeleteRequest protocol buffer.
      response: A MemcacheDeleteResponse protocol buffer.
    """
    for item in request.item_list():
      encoded_key = encode_key(self._project_id, request.name_space(),
                               item.key())
      try:
        key_existed = self._memcache.delete(encoded_key)
      except MemcacheClientError as error:
        raise apiproxy_errors.ApplicationError(INVALID_VALUE, str(error))
      except TRANSIENT_ERRORS as error:
        raise apiproxy_errors.ApplicationError(
            UNSPECIFIED_ERROR, 'Transient memcache error: {}'.format(error))

      # With noreply disabled above, delete reports whether the key existed.
      response.add_delete_status(MemcacheDeleteResponse.DELETED if key_existed
                                 else MemcacheDeleteResponse.NOT_FOUND)

  def _Increment(self, namespace, request):
    """Internal function for incrementing from a MemcacheIncrementRequest.

    Args:
      namespace: A string containing the namespace for the request, if any.
        Pass an empty string if there is no namespace.
      request: A MemcacheIncrementRequest instance.

    Returns:
      An integer indicating the new value.

    Raises:
      ApplicationError if unable to perform the mutation.
    """
    encoded_key = encode_key(self._project_id, namespace, request.key())
    method = self._memcache.incr
    if request.direction() == MemcacheIncrementRequest.DECREMENT:
      method = self._memcache.decr

    try:
      response = method(encoded_key, request.delta())
    except MemcacheClientError as error:
      raise apiproxy_errors.ApplicationError(INVALID_VALUE, str(error))
    except TRANSIENT_ERRORS as error:
      raise apiproxy_errors.ApplicationError(
          UNSPECIFIED_ERROR, 'Transient memcache error: {}'.format(error))

    # None means the key was absent on the backend.
    if response is None and not request.has_initial_value():
      raise apiproxy_errors.ApplicationError(
          UNSPECIFIED_ERROR, 'Key does not exist')

    if response is not None:
      return response

    # If the key was not present and an initial value was provided, perform
    # the mutation client-side and set the key if it still doesn't exist.
    flags = 0
    if request.has_initial_flags():
      flags = request.initial_flags()

    if request.direction() == MemcacheIncrementRequest.INCREMENT:
      updated_val = request.initial_value() + request.delta()
    else:
      updated_val = request.initial_value() - request.delta()

    # Clamp at zero and wrap at the 64-bit counter boundary.
    updated_val = max(updated_val, 0) % (MAX_INCR + 1)
    try:
      # NOTE(review): six.binary_type(int) — same Python 3 concern as in
      # _Dynamic_Set.
      response = self._memcache.add(
          encoded_key, (six.binary_type(updated_val), flags))
    except (TRANSIENT_ERRORS + (MemcacheClientError,)):
      raise apiproxy_errors.ApplicationError(
          UNSPECIFIED_ERROR, 'Unable to set initial value')

    # add returns False if another writer won the race to create the key.
    if response is False:
      raise apiproxy_errors.ApplicationError(
          UNSPECIFIED_ERROR, 'Unable to set initial value')

    return updated_val

  def _Dynamic_Increment(self, request, response):
    """Implementation of increment for memcache.

    Args:
      request: A MemcacheIncrementRequest protocol buffer.
      response: A MemcacheIncrementResponse protocol buffer.
    """
    new_value = self._Increment(request.name_space(), request)
    response.set_new_value(new_value)

  def _Dynamic_BatchIncrement(self, request, response):
    """Implementation of batch increment for memcache.

    Args:
      request: A MemcacheBatchIncrementRequest protocol buffer.
      response: A MemcacheBatchIncrementResponse protocol buffer.
    """
    for request_item in request.item_list():
      item = response.add_item()
      try:
        new_value = self._Increment(request.name_space(), request_item)
      except apiproxy_errors.ApplicationError as error:
        # Invalid values leave the counter untouched; other failures are
        # reported as errors.
        if error.application_error == INVALID_VALUE:
          item.set_increment_status(MemcacheIncrementResponse.NOT_CHANGED)
        else:
          item.set_increment_status(MemcacheIncrementResponse.ERROR)

        continue

      item.set_increment_status(MemcacheIncrementResponse.OK)
      item.set_new_value(new_value)

  def _Dynamic_FlushAll(self, request, response):
    """Implementation of MemcacheService::FlushAll().

    Args:
      request: A MemcacheFlushRequest.
      response: A MemcacheFlushResponse.
    """
    # TODO: Prevent a project from clearing another project's namespace.
    self._memcache.flush_all()

  def _Dynamic_Stats(self, request, response):
    """Implementation of MemcacheService::Stats().

    Args:
      request: A MemcacheStatsRequest.
      response: A MemcacheStatsResponse.
    """
    # TODO: Gather stats for a project rather than the deployment.
    hits = 0
    misses = 0
    byte_hits = 0
    items = 0
    byte_count = 0
    oldest_item_age = 0
    for server in six.itervalues(self._memcache.clients):
      server_stats = server.stats()
      hits += server_stats.get('get_hits', 0)
      misses += server_stats.get('get_misses', 0)
      byte_hits += server_stats.get('bytes_read', 0)
      items += server_stats.get('curr_items', 0)
      byte_count += server_stats.get('bytes', 0)

      # Using the "age" field may not be correct here. The GAE docs claim this
      # should specify "how long in seconds since the oldest item in the cache
      # was accessed" rather than when it was created.
      # NOTE(review): max() raises ValueError when a server reports no
      # ':age' entries (empty cache) — confirm this is unreachable.
      item_stats = server.stats('items')
      oldest_server_item = max(age for key, age in six.iteritems(item_stats)
                               if key.endswith(':age'))
      oldest_item_age = max(oldest_item_age, oldest_server_item)

    stats = response.mutable_stats()
    stats.set_hits(hits)
    stats.set_misses(misses)
    stats.set_byte_hits(byte_hits)
    stats.set_items(items)
    stats.set_bytes(byte_count)
    stats.set_oldest_item_age(oldest_item_age)
class CouchbaseMemcacheMirror(object):
    """Mirrors all mutations to both a Couchbase bucket and Memcached.

    One datastore is designated *primary*; gets and CAS operations consult
    it first, optionally falling back to the secondary for reads.
    """

    def __init__(self, couchbase_uri, memcached_hosts, primary=PRIMARY_COUCHBASE):
        """
        :param couchbase_uri: Connection string for Couchbase
        :param memcached_hosts: List of Memcached nodes
        :param primary: Determines which datastore is authoritative.
            This affects how get operations are performed and which datastore
            is used for CAS operations.
                PRIMARY_COUCHBASE: Couchbase is authoritative
                PRIMARY_MEMCACHED: Memcached is authoritative
            By default, Couchbase is the primary store
        :return:
        """
        self.cb = CbBucket(couchbase_uri)
        self.mc = McClient(memcached_hosts)
        self._primary = primary

    @property
    def primary(self):
        """Which datastore is authoritative (read-only)."""
        return self._primary

    def _cb_get(self, key):
        # Couchbase raises on a missing key; normalize to memcached-style None.
        try:
            return self.cb.get(key).value
        except NotFoundError:
            return None

    def get(self, key, try_alternate=True):
        """
        Gets a document
        :param key: The key to retrieve
        :param try_alternate: Whether to try the secondary data source if the
            item is not found in the primary.
        :return: The value as a Python object
        """
        if self._primary == PRIMARY_COUCHBASE:
            order = [self._cb_get, self.mc.get]
        else:
            order = [self.mc.get, self._cb_get]

        for meth in order:
            ret = meth(key)
            if ret or not try_alternate:
                return ret
        return None

    def _cb_mget(self, keys):
        """
        Internal method to execute a Couchbase multi-get
        :param keys: The keys to retrieve
        :return: A tuple of {found_key:found_value, ...}, [missing_key1,...]
        """
        try:
            ok_rvs = self.cb.get_multi(keys)
            bad_rvs = {}
        except NotFoundError as e:
            ok_rvs, bad_rvs = e.split_results()

        # BUGFIX: iterate (key, result) pairs explicitly.  The result object
        # is dict-like, so bare iteration yields keys only and the unpack
        # would fail.
        ok_dict = {k: (v.value, v.cas) for k, v in ok_rvs.items()}
        return ok_dict, bad_rvs.keys()

    def get_multi(self, keys, try_alternate=True):
        """
        Gets multiple items from the server
        :param keys: The keys to fetch as an iterable
        :param try_alternate: Whether to fetch missing items from alternate
            store
        :return: A dictionary of key:value. Only contains keys which exist
            and have values
        """
        if self._primary == PRIMARY_COUCHBASE:
            # BUGFIX: was self._cb_get(keys) — the single-key getter, which
            # returns a scalar and cannot be unpacked into (ok, err).
            ok, err = self._cb_mget(keys)
            if err and try_alternate:
                ok.update(self.mc.get_many(err))
            return ok
        else:
            ok = self.mc.get_many(keys)
            if len(ok) < len(keys) and try_alternate:
                keys_err = set(keys) - set(ok)
                ok.update(self._cb_mget(list(keys_err))[0])
            return ok

    def gets(self, key):
        """
        Get an item with its CAS. The item will always be fetched from the
        primary data store.
        :param key: the key to get
        :return: the value of the key, or None if no such value
        """
        if self._primary == PRIMARY_COUCHBASE:
            try:
                rv = self.cb.get(key)
                return key, rv.cas
            except NotFoundError:
                return None, None
        else:
            return self.mc.gets(key)

    def gets_multi(self, keys):
        """Multi-key variant of :meth:`gets`, served from the primary store."""
        if self._primary == PRIMARY_COUCHBASE:
            try:
                rvs = self.cb.get_multi(keys)
            except NotFoundError as e:
                rvs, _ = e.split_results()

            # BUGFIX: iterate items, not bare keys (see _cb_mget).
            return {k: (v.value, v.cas) for k, v in rvs.items()}
        else:
            # TODO: I'm not sure if this is implemented in HasClient :(
            return self.mc.gets_many(keys)

    def delete(self, key):
        """Remove a key from both stores; returns a Status."""
        st = Status()
        try:
            self.cb.remove(key)
        except NotFoundError as e:
            st.cb_error = e

        st.mc_status = self.mc.delete(key)
        return st

    def delete_multi(self, keys):
        """Remove several keys from both stores; returns a Status."""
        st = Status()
        try:
            self.cb.remove_multi(keys)
        except NotFoundError as e:
            st.cb_error = e

        st.mc_status = self.mc.delete_many(keys)
        # BUGFIX: the Status was built but never returned.
        return st

    def _do_incrdecr(self, key, value, is_incr):
        # Couchbase uses a signed delta; memcached has separate incr/decr.
        cb_value = value if is_incr else -value
        mc_meth = self.mc.incr if is_incr else self.mc.decr
        st = Status()
        try:
            self.cb.counter(key, delta=cb_value)
        except NotFoundError as e:
            st.cb_error = e

        st.mc_status = mc_meth(key, value)
        # BUGFIX: return the Status so incr/decr don't return None.
        return st

    def incr(self, key, value):
        """Increment a counter in both stores; returns a Status."""
        return self._do_incrdecr(key, value, True)

    def decr(self, key, value):
        """Decrement a counter in both stores; returns a Status."""
        return self._do_incrdecr(key, value, False)

    def touch(self, key, expire=0):
        """Update a key's expiration in both stores; returns a Status."""
        st = Status()
        try:
            self.cb.touch(key, ttl=expire)
        except NotFoundError as e:
            # BUGFIX: was `st.cb_error = st`, which stored the Status object
            # itself instead of the exception.
            st.cb_error = e

        # BUGFIX: forward the expiration; it was silently dropped before.
        st.mc_status = self.mc.touch(key, expire=expire)
        return st

    def set(self, key, value, expire=0):
        """
        Write first to Couchbase, and then to Memcached
        :param key: Key to use
        :param value: Value to use
        :param expire: If set, the item will expire in the given amount of
            time
        :return: Status object if successful (will always be success).
            on failure an exception is raised
        """
        self.cb.upsert(key, value, ttl=expire)
        self.mc.set(key, value, expire=expire)
        return Status()

    def set_multi(self, values, expire=0):
        """
        Set multiple items.
        :param values: A dictionary of key, value indicating values to store
        :param expire: If present, expiration time for all the items
        :return:
        """
        self.cb.upsert_multi(values, ttl=expire)
        self.mc.set_many(values, expire=expire)
        return Status()

    def replace(self, key, value, expire=0):
        """
        Replace existing items
        :param key: key to replace
        :param value: new value
        :param expire: expiration for item
        :return: Status object. Will be OK
        """
        status = Status()
        try:
            self.cb.replace(key, value, ttl=expire)
        except NotFoundError as e:
            status.cb_error = e

        status.mc_status = self.mc.replace(key, value, expire=expire)
        return status

    def add(self, key, value, expire=0):
        """Add a key only if it does not exist yet; returns a Status."""
        status = Status()
        try:
            self.cb.insert(key, value, ttl=expire)
        except KeyExistsError as e:
            status.cb_error = e

        status.mc_status = self.mc.add(key, value, expire=expire)
        return status

    def _append_prepend(self, key, value, is_append):
        cb_meth = self.cb.append if is_append else self.cb.prepend
        mc_meth = self.mc.append if is_append else self.mc.prepend
        st = Status()
        try:
            cb_meth(key, value, format=FMT_UTF8)
        except (NotStoredError, NotFoundError) as e:
            st.cb_error = e

        st.mc_status = mc_meth(key, value)
        # BUGFIX: return the Status so append/prepend don't return None.
        return st

    def append(self, key, value):
        """Append to an existing value in both stores; returns a Status."""
        return self._append_prepend(key, value, True)

    def prepend(self, key, value):
        """Prepend to an existing value in both stores; returns a Status."""
        return self._append_prepend(key, value, False)

    def cas(self, key, value, cas, expire=0):
        """
        Compare-and-swap against the primary store; on success the secondary
        is overwritten unconditionally.
        :return: True on success, False on CAS mismatch, None if missing
            (Couchbase primary only).
        """
        if self._primary == PRIMARY_COUCHBASE:
            try:
                self.cb.replace(key, value, cas=cas, ttl=expire)
                # BUGFIX: pymemcache's keyword is `expire`, not `ttl` —
                # the old call raised TypeError at runtime.
                self.mc.set(key, value, expire=expire)
                return True
            except KeyExistsError:
                return False
            except NotFoundError:
                return None
        else:
            # BUGFIX: forward the expiration to memcached's cas as well.
            return self.mc.cas(key, value, cas, expire=expire)