def __init__(self, name, mc_host, mc_port, run=None, run_id=None):
    """Record identifying metadata and open a memcached connection.

    NOTE(review): assumes ``Client`` is pymemcache's Client taking a
    (host, port) tuple — confirm against this file's imports.
    """
    self.name = name
    self.run = run
    self.run_id = run_id
    self.mc_host = mc_host
    self.mc_port = mc_port
    self.mc_client = Client((self.mc_host, self.mc_port))
def UpdateClassifier():
    """Retrain the discussion classifier and cache the fitted model in memcached.

    Builds feature vectors from reviewed discussions, balances classes with
    SMOTETomek, selects the top-20% features by chi2, fits a logistic
    regression, then pickles (clf, feature index, selector) into memcached.
    """
    print("Update Classifier")
    clf = LogisticRegression(
        random_state=0, solver='lbfgs', multi_class='multinomial')
    smt = SMOTETomek(random_state=42)
    discussions = list(Discussion.objects.filter(reviewed=True))
    globalFeaturesIndex = ef.GetGlobalFeaturesIndex(
        discussions, list(range(0, len(discussions))), ef.E19)
    X, y = [], []
    for discussion in discussions:
        featureVector = ef.ExtractFeatureFromCorpus(
            globalFeaturesIndex, discussion.content, ef.E19)
        X.append(featureVector)
        y.append(discussion.tag.tag_id)
    # Resample to balance classes before feature selection.
    # NOTE(review): fit_sample was renamed fit_resample in imblearn >= 0.4;
    # kept as-is for the pinned version, but confirm.
    X, y = smt.fit_sample(X, y)
    selector = GenericUnivariateSelect(chi2, 'percentile', param=20)
    X = selector.fit_transform(X, y)
    try:
        clf.fit(X, y)
        print("fit done")
        client = Client(('localhost', 11211))
        model = (clf, globalFeaturesIndex, selector)
        client.set('model', pickle.dumps(model))
        # Round-trip to verify the cached blob is loadable.
        model_in_bytes = client.get('model')
        model_from_cache = pickle.loads(model_in_bytes)
        print(len(model_from_cache))
    except Exception as e:
        # Bug fix: was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt and hid the actual error.
        print("clf failed to cache...")
        print(e)
class MemcachedWrapper():
    """Thin get/set/delete wrapper around a pymemcache Client configured from
    a single "host:port" location string."""

    MEMCACHED_CONNECT_TIMEOUT = 5  # 5 seconds
    MEMCACHED_TIMEOUT = 5  # 5 seconds

    def __init__(self, memcached_location):
        """Parse ``memcached_location`` ("host:port") and connect.

        Raises ValueError when the location is not exactly host:port.
        """
        parts = memcached_location.split(":")
        if len(parts) != 2:
            raise ValueError(
                f"Found incorrectly formatted parameter memcached location: {memcached_location}"
            )
        host, port = parts[0], int(parts[1])
        self.memcached_client = Client(
            server=(host, port),
            serializer=serde.python_memcache_serializer,
            deserializer=serde.python_memcache_deserializer,
            connect_timeout=MemcachedWrapper.MEMCACHED_CONNECT_TIMEOUT,
            timeout=MemcachedWrapper.MEMCACHED_TIMEOUT)

    def get(self, key):
        """Return the cached value for ``key`` (None on miss)."""
        return self.memcached_client.get(key)

    def set(self, key, value, timeout=None):
        """Store ``value`` under ``key`` with an optional expiry."""
        self.memcached_client.set(key, value, timeout)

    def delete(self, key):
        """Remove ``key`` from the cache."""
        self.memcached_client.delete(key)
class MyMemcache(object):
    """JSON-serializing memcached helper with a ~1.2-day default expiry."""

    def __init__(self, ip='localhost', port=11211):
        # json_serializer(key, value): flag 1 = raw string, flag 2 = JSON.
        def json_serializer(key, value):
            if type(value) == str:
                return value, 1
            return json.dumps(value), 2

        # json_deserializer(key, value, flags) — python3 (value is bytes).
        def json_deserializer(key, value, flags):
            if flags == 1:
                return value.decode('utf-8')
            if flags == 2:
                return json.loads(value.decode('utf-8'))
            raise Exception("Unknown serialization format")

        self.ip = ip
        self.port = port
        self.client = Client((ip, port),
                             serializer=json_serializer,
                             deserializer=json_deserializer,
                             key_prefix='',
                             encoding='utf8',
                             allow_unicode_keys=True)

    ''' set_task_result '''

    @catch_exception
    def set_value(self, key, value):
        # set(key, value, expire=0, noreply=None, flags=None)
        expire_second = int(60 * 60 * 24 * 1.2)  # expire_second must be int
        self.client.set(key=key, value=value, expire=expire_second)

    ''' get_task_result '''

    @catch_exception
    def get_value(self, key, default=''):
        result = self.client.get(key=key, default=default)
        debug_p('[get_value]', key, result)
        return result

    ''' close '''

    def client_close(self):
        """Best-effort close of the memcached connection."""
        try:
            self.client.close()
        except Exception as e:
            # Bug fix: the original handler body was only the comment "# pass",
            # which leaves an empty suite (a SyntaxError). A real ``pass``
            # restores the intended swallow-on-close behavior.
            pass
def make_client_pool(self, hostname, mock_socket_values,
                     serializer=None, **kwargs):
    """Build a PooledClient whose pool always dispenses one mocked Client."""
    backing = Client(hostname, serializer=serializer, **kwargs)
    backing.sock = MockSocket(mock_socket_values)
    pooled = PooledClient(hostname, serializer=serializer)
    pooled.client_pool = pool.ObjectPool(lambda: backing)
    # NOTE(review): returns the mocked backing client (not the pooled
    # wrapper) — callers appear to rely on this; confirm before changing.
    return backing
def __init__(self, **kwargs):
    """Open a memcached connection from 'address'/'port' keyword settings.

    Optional kwargs: connect_timeout, timeout (default DEFAULT_TIMEOUT) and
    ignore_exc (default True: cache errors are treated as misses).
    """
    address = kwargs.get('address')
    port = int(kwargs.get('port'))
    self.client = Client(
        server=(address, port),
        connect_timeout=kwargs.get('connect_timeout', self.DEFAULT_TIMEOUT),
        timeout=kwargs.get('timeout', self.DEFAULT_TIMEOUT),
        ignore_exc=kwargs.get('ignore_exc', True))
def startClient(id):
    """Load-test the local server with 500 set/get pairs while tracking the
    number of concurrently running client threads."""
    global active_threads
    global max_active_threads
    try:
        client = Client(('localhost', 8888))
        print('Client ' + str(id) + ' connected')
        start = time.time()
        with lock:
            active_threads += 1
            max_active_threads = max(max_active_threads, active_threads)
        for _ in range(500):
            key, value = randomString(5), randomString(5)
            client.set(key, value)
            result = client.get(key)
        with lock:
            active_threads -= 1
        end = time.time()
        print('Client ' + str(id) + ' exiting. Time spent: ' + str(end - start))
        end = time.time()
    except BaseException as e:
        print('Exception' + str(e))
    return
class MemcachedWrapper(object):
    '''Class to allow readonly access to underlying memcached connection'''

    def __init__(self, counter, host, port=11211, socket_connect_timeout=1):
        if not host:
            raise ConfigurationError(
                'Memcached wrapper improperly configured. Valid memcached host is required!'
            )
        self.__con = Client((host, port))
        self._counter = counter('')

    def __del__(self):
        self.__con.quit()

    def get(self, key):
        """Return the raw cached value for ``key``."""
        return self.__con.get(key)

    def json(self, key):
        """Return the cached value for ``key`` decoded as JSON."""
        return pyjson.loads(self.get(key))

    def stats(self, extra_keys=None):
        """Summarize memcached stats: per-second rates for counter keys,
        raw values for known value keys and any ``extra_keys``.

        Bug fix: ``extra_keys`` used the mutable default ``[]``; ``None``
        avoids the shared-mutable-default pitfall with identical behavior.
        """
        extra_keys = extra_keys or []
        data = self.__con.stats()
        ret = {}
        for key in data:
            if key in COUNTER_KEYS:
                ret['{}_per_sec'.format(key.replace('total_', ''))] = \
                    round(self._counter.key(key).per_second(data.get(key, 0)), 2)
            elif key in VALUE_KEYS:
                ret[key] = data[key]
            elif key in extra_keys:
                ret[key] = data[key]
        return ret
def runOTX():
    '''Retrieve intel from OTXv2 API.'''
    days = int(parser.get('otx', 'days_of_history'))
    key = parser.get('otx', 'api_key')
    mem_host = parser.get('memcached', 'mem_host')
    mem_port = int(parser.get('memcached', 'mem_port'))
    memcached = Client((mem_host, mem_port))
    memcached_agetime = int(parser.get('memcached', 'agetime'))
    memcached_sleeptime = int(parser.get('memcached', 'sleeptime'))
    # Only pull pulses modified within the configured history window.
    mtime = (datetime.now() - timedelta(days=days)).isoformat()
    for pulse in iter_pulses(key, mtime):
        pulse_name = pulse['name']
        pulse_id = pulse['id']
        for indicator in pulse[u'indicators']:
            ioc = indicator['indicator']
            ioc_type = map_indicator_type(indicator[u'type'])
            tag = pulse_name + '-' + pulse_id
            if ioc_type is None:
                continue
            try:
                url = pulse[u'references'][0]
            except IndexError:
                url = 'https://otx.alienvault.com'
            memcached_key = ioc_type + '-' + ioc
            try:
                memcached.set(memcached_key.encode('utf-8'),
                              tag.encode('utf-8'), memcached_agetime)
            except Exception:
                # Bug fix: was a bare "except:"; keep the best-effort write
                # but stop swallowing SystemExit/KeyboardInterrupt.
                pass
            # Throttle between writes to avoid hammering memcached.
            time.sleep(memcached_sleeptime)
def test_incr_decr(client_class, host, port, socket_module):
    """incr/decr return None for missing keys and adjust numeric values."""
    client = Client((host, port), socket_module=socket_module)
    client.flush_all()

    assert client.incr(b'key', 1, noreply=False) is None
    assert client.set(b'key', b'0', noreply=False) is True
    assert client.incr(b'key', 1, noreply=False) == 1

    def _bad_int():
        client.incr(b'key', b'foobar')

    with pytest.raises(MemcacheClientError):
        _bad_int()

    assert client.decr(b'key1', 1, noreply=False) is None
    assert client.decr(b'key', 1, noreply=False) == 0
    assert client.get(b'key') == b'0'
def __init__(self, pozyx_offset, lencoder, rencoder, wheel_width=150.5):
    """Set up the robot state estimator and its 8-state Kalman filter.

    Measurement noise R is built from encoder step size; F and Q are
    (re)defined each iteration elsewhere.
    """
    super(State, self).__init__()
    self.lencoder = lencoder
    self.rencoder = rencoder
    self.wheel_width = wheel_width
    self.pozyx_offset = pozyx_offset
    self._state = np.zeros(8)

    # https://github.com/rlabbe/filterpy
    self.KF = KalmanFilter(dim_x=8, dim_z=8)
    self.KF.x = np.array([0, ] * 8).T
    # Will be defined each iteration:
    # self.KF.F =
    # self.KF.Q =
    self.KF.H = np.eye(8)  # No measurements need to be transformed
    self.KF.P = np.eye(8) * 500

    vel_noise = 10 * (self.rencoder.step + self.lencoder.step)
    accel_noise = 100  # 2*10
    self.KF.R = np.square(np.diag([
        150 / 2, vel_noise, accel_noise,
        30 / 2, vel_noise, accel_noise,
        np.pi / 180, vel_noise / (np.pi * self.wheel_width),
    ]))

    self.lock = threading.Lock()
    self.daemon = True
    self.stop = False
    self.memcached_client = Client(('localhost', 11211))
    self._heading = None
    self._location = None
def __setstate__(self, state):
    """Restore pickled attributes and re-open the memcached connection
    (the live client itself is never pickled)."""
    for attr in ('name', 'run', 'run_id', 'mc_host', 'mc_port'):
        setattr(self, attr, state[attr])
    self.mc_client = Client((self.mc_host, self.mc_port))
def __init__(self):
    """Resolve the memcached endpoint/port (env vars win over config.ini)
    and connect a pymemcache Client."""
    cf = configparser.ConfigParser()
    cf.read("./config/config.ini")
    # elasticache settings
    # simcomlbs.0rxi5f.cfg.cnw1.cache.amazonaws.com.cn:11211
    if "memcachedendpoint" in os.environ:
        memcachedendpoint = os.environ['memcachedendpoint']
    else:
        memcachedendpoint = cf.get("SimcomLBSMemcached", "memcachedServer")
    if "memcachedport" in os.environ:
        memcachedport = os.environ['memcachedport']
    else:
        # FIXME(review): this read the "memcachedServer" option again —
        # almost certainly a copy/paste bug; confirm the real port option
        # name in config.ini before changing the key string.
        memcachedport = cf.get("SimcomLBSMemcached", "memcachedServer")
    logger.info("%s : %s" % (memcachedendpoint, memcachedport))
    #-------------Hash client for AWS memcached (auto-discovery) kept for reference:
    # elasticache_config_endpoint = endpoint + ":" + port
    # nodes = elasticache_auto_discovery.discover(elasticache_config_endpoint)
    # nodes = map(lambda x: (x[1], int(x[2])), nodes)
    # self.memcache_client = HashClient(nodes)
    # ----------------Directly connected to pod memcached---------------------
    # Bug fix: pymemcache Client takes a single (host, port) tuple and the
    # port must be an int; the original passed two positional arguments.
    self.memcache_client = Client((memcachedendpoint, int(memcachedport)))
    # Bug fix: the original called set(self, 'abcd123', '1234ab'), passing
    # the instance itself as the cache key.
    self.memcache_client.set('abcd123', "1234ab")
class MemcachedCache_Master(MemcachedCache, object):
    """ the master instance of the memcached cache interface. copies of this
    object will NOT share the connection, but instantiate their own. When
    this master is destroyed it can flush the database, effectively erasing
    all data

    Parameters
    ----------
    memc_params : dict
        dictionary with keys [host, port] to the memcached-server instance
    wait_on_insert : bool
        if True, wait on insert operations to successfully complete before
        continuing (default: False)
    val_ttl : int >0
        the time to life for each value in seconds
    flush_on_del : bool
        flush the databases when the object is destroyed (default: False)
    """

    def __init__(self, memc_params=_DEFAULT_MEMCACHE_CRED,
                 wait_on_insert=False, val_ttl=12, flush_on_del=False):
        self.is_master = True
        self.wait_on_insert = wait_on_insert
        self.memc_params = memc_params
        self.val_ttl = val_ttl
        self.flush_on_del = flush_on_del
        self.client = None

    def __del__(self):
        if self.flush_on_del:
            self.client = Client(
                (self.memc_params['host'], self.memc_params['port']),
                default_noreply=not self.wait_on_insert)
            # Bug fix: pymemcache's Client has no flushdb() method (that is
            # Redis terminology); the memcached flush command is flush_all().
            self.client.flush_all()
class Reader(BaseReader):
    """ Memcached settings Reader

    A simple memcached getter using pymemcache library.
    """
    _default_conf = {
        'host': 'localhost',
        'port': 11211,
    }

    def __init__(self, conf):
        super(Reader, self).__init__(conf)
        self.client = Client((self.conf['host'], self.conf['port']))

    def _get(self, key):
        """Fetch ``key``, decoding byte payloads to text."""
        value = self.client.get(key)
        if isinstance(value, six.binary_type):
            value = value.decode('utf-8')
        return value

    def _set(self, key, value):
        """Store ``key`` synchronously (noreply=False waits for the ack)."""
        self.client.set(key, value, noreply=False)
class CHIDriverMemcache(CHIDriver):
    """Memcache driver for the CHI cache interface."""

    def __init__(self, *av, **kw):
        """Connect to the first configured server with the CHI timeouts."""
        super().__init__(*av, **kw)
        self.client = Memcache(
            (self.server[0]["host"], self.server[0]["port"]),
            connect_timeout=self.connect_timeout,
            timeout=self.request_timeout,
        )

    def driver_set(self, key, packed_chi_object, ttl):
        """Store a packed CHI object — unlike the Redis driver this is a
        single command rather than two."""
        self.client.set(key, packed_chi_object, ttl)

    def keys(self, mask):
        """Key listing by mask is not supported by memcache."""
        raise CHIMethodIsNotSupportedException(
            "Метод keys для мемкеша не поддерживается.")

    def erase(self, mask):
        """Key deletion by mask is not supported by memcache."""
        raise CHIMethodIsNotSupportedException(
            "Метод erase для мемкеша не поддерживается.")
def __init__(self, mc_host, mc_port):
    """Connect to memcached, then prime monitoring state and the score."""
    self.mc_host = mc_host
    self.mc_port = mc_port
    self.client = Client((mc_host, mc_port))
    self.setupMonitoringVariables()
    self.calculateInitialScore()
def test_socket_connect_unix(self):
    """Connecting to a filesystem path yields an AF_UNIX socket."""
    path = '/tmp/pymemcache.{pid}'.format(pid=os.getpid())
    with MockUnixSocketServer(path):
        client = Client(path)
        client._connect()
        assert client.sock.family == socket.AF_UNIX
def run(self):
    """Fetch the Talos IP blacklist and merge the 'talos-ip' tag into each
    IP's cached tag list (5-minute expiry), logging failures to disk."""
    client = Client(('127.0.0.1', 11211))
    url = 'https://talosintelligence.com/documents/ip-blacklist'
    talosvalue = 'talos-ip'
    try:
        response = requests.get(url)
        if (response):
            responseArr = []
            for line in response.text.splitlines():
                # Idiom fix: was "line.startswith('#') == False".
                if not line.startswith('#'):
                    responseArr.append(line)
            # Bulk-read existing tags for every listed IP.
            valueCheck = client.get_many(responseArr)
            for k in responseArr:
                valueArr = []
                tempArr = []
                tempArr.append(talosvalue)
                if k in valueCheck:
                    val = valueCheck[k].decode()
                    valueArr = talosToMemcache.stringHelper(val)
                    for item in valueArr:
                        if item not in tempArr:
                            tempArr.append(item)
                client.set(k, tempArr, 300)
    except Exception as e:
        with open('/var/log/misppullLog.txt', 'a') as file:
            file.write(
                '{0} - talosFeed-script failed with error: {1} \n'.format(
                    str(time.asctime()), str(e)))
class MemcacheCli(object):
    """Exception-swallowing facade over pymemcache: failed writes return
    False, failed reads return None."""

    def __init__(self, host, port):
        self.host = host
        self.port = port
        self.client = Client((self.host, self.port))

    def set(self, key, value, expire):
        """Store one key; False on any error."""
        try:
            return self.client.set(key, value, expire)
        except Exception:
            return False

    def get(self, key):
        """Fetch one key; None on miss or error."""
        try:
            return self.client.get(key, default=None)
        except Exception:
            return None

    def mset(self, values, expire):
        """Store many keys at once; False on any error."""
        try:
            return self.client.set_many(values, expire)
        except Exception:
            return False

    def mget(self, keys):
        """Fetch many keys at once; None on error."""
        try:
            return self.client.get_many(keys)
        except Exception:
            return None
def main():
    """Smoke-test line intersection, version comparison, and a memcached
    round-trip."""
    line1 = Line(1, 5)
    line2 = Line(2, 6)
    line3 = Line(6, 8)
    line4 = Line(7, 9)
    assert do_intersect(line1, line2) == True
    assert do_intersect(line1, line3) == False
    assert do_intersect(line2, line1) == True
    assert do_intersect(line3, line1) == False
    assert do_intersect(line3, line4) == True

    assert compare_versions("1.1", "1.2") == -1
    assert compare_versions("1.2.1", "1.2") == 1
    assert compare_versions("1.2.1", "1.2.1a") == -1
    assert compare_versions("1.2.1b", "1.2.1a") == 1
    assert compare_versions("1.2.1", "1.21") == -1
    assert compare_versions("2.1.1", "1.2.1") == 1
    assert compare_versions("1.2.1", "1.2.1") == 0
    assert compare_versions("1.", "1") == 0
    assert compare_versions("a1", "1a") == 1
    assert compare_versions("1.2.1", "1.19.0") == -1

    # Bug fix: pymemcache Client takes ONE (host, port) tuple; the original
    # passed 11211 as the second positional (serde) argument.
    client = Client(('127.0.0.1', 11211))
    client.set('some_key', 'some_value', expire=30)
    # Bug fix: without a deserializer pymemcache returns bytes, so the
    # original str comparison could never succeed.
    assert client.get('some_key') == b'some_value'
    print("OK")
class Client:
    """Config-driven memcached client with JSON (de)serialization."""

    def __init__(self, config):
        self.config = config
        self.memcache_client = PymemcacheClient(
            server=(self.config["ip"], self.config["port"]),
            serializer=self.json_serializer,
            deserializer=self.json_deserializer,
            connect_timeout=self.config["connect_timeout"],
            timeout=self.config["timeout"])

    def json_serializer(self, key, value):
        # Flag 1: raw string, flag 2: JSON-encoded payload.
        if type(value) == str:
            return value, 1
        return json.dumps(value), 2

    def json_deserializer(self, key, value, flags):
        # NOTE(review): on Python 3 ``value`` arrives as bytes, so flag 1
        # returns bytes here — confirm callers expect that.
        if flags == 1:
            return value
        if flags == 2:
            return json.loads(value)
        raise Exception("Unknown serialization format")

    def write(self, key, message):
        """Store ``message`` under ``key`` using configured expiry/noreply."""
        logger.info("Writing to cache: {}".format(key))
        self.memcache_client.set(key, message,
                                 expire=self.config["key_expiration"],
                                 noreply=self.config["noreply_flag"])

    def read(self, key):
        """Fetch ``key`` from the cache."""
        logger.info("Reading from cache: {}".format(key))
        return self.memcache_client.get(key)
class MemcachedWrapper(object):
    '''Class to allow readonly access to underlying memcached connection'''

    def __init__(self, counter, host, port=11211, socket_connect_timeout=1):
        if not host:
            raise ConfigurationError('Memcached wrapper improperly configured. Valid memcached host is required!')
        self.__con = Client((host, port))
        self._counter = counter('')

    def __del__(self):
        self.__con.quit()

    def get(self, key):
        """Return the raw cached value for ``key``."""
        return self.__con.get(key)

    def json(self, key):
        """Return the cached value for ``key`` decoded as JSON."""
        return pyjson.loads(self.get(key))

    def stats(self, extra_keys=None):
        """Summarize memcached stats: per-second rates for counter keys,
        raw values for known value keys and any ``extra_keys``.

        Bug fix: ``extra_keys`` used the mutable default ``[]``; ``None``
        avoids the shared-mutable-default pitfall with identical behavior.
        """
        extra_keys = extra_keys or []
        data = self.__con.stats()
        ret = {}
        for key in data:
            if key in COUNTER_KEYS:
                ret['{}_per_sec'.format(key.replace('total_', ''))] = \
                    round(self._counter.key(key).per_second(data.get(key, 0)), 2)
            elif key in VALUE_KEYS:
                ret[key] = data[key]
            elif key in extra_keys:
                ret[key] = data[key]
        return ret
class BaseModel(object):
    """Base model: opens a MySQL connection and, when configured, a
    memcached cache whose values are decoded to str."""

    def __init__(self):
        self.conn = pymysql.connect(host=CONFIG['DB_HOST'],
                                    port=CONFIG['DB_PORT'],
                                    user=CONFIG['DB_USER'],
                                    password=CONFIG['DB_PASSWORD'],
                                    db=CONFIG['DATABASE'],
                                    charset='utf8mb4')
        if CONFIG['MEMCACHED_ADDRESS'] and CONFIG['MEMCACHED_PORT']:
            cache_host = CONFIG['MEMCACHED_ADDRESS']
            cache_port = CONFIG['MEMCACHED_PORT']
            self.cache = Client(
                (cache_host, cache_port),
                timeout=CONFIG['MEMCACHED_TIMEOUT'],
                deserializer=lambda k, v, f: str(v, encoding='utf-8')
                if isinstance(v, bytes) else v)
            self.cache_expire = CONFIG['MEMCACHED_CACHE_EXPIRE']

    def __del__(self):
        """ close connection
        :return: None
        """
        if hasattr(self, 'cache'):
            self.cache.close()
        if hasattr(self, 'conn'):
            self.conn.close()
class NoSQLCache:
    """Read-through memcached cache in front of a SQL database (singleton)."""

    __instance = None
    timeout = 5  # cache TTL in seconds

    @staticmethod
    def inst():
        """Return the process-wide singleton, creating it on first use."""
        # Idiom fix: was "== None"; identity comparison is correct for None.
        if NoSQLCache.__instance is None:
            NoSQLCache.__instance = NoSQLCache(SqliteDb.inst())
        return NoSQLCache.__instance

    def __init__(self, sql_db_object):
        self.client = Client(('localhost', 11211))
        self.sql_db = sql_db_object

    def get(self, key):
        """Return the value for ``key``, filling the cache from SQL on miss.

        Spaces are mapped to '$' because memcached keys cannot contain them.
        """
        key_corrector = re.sub(r' ', '$', key)
        value = self.client.get(key_corrector)
        if value:
            # SECURITY: eval() on cached bytes executes arbitrary code if
            # anything else can write this cache — consider json or
            # ast.literal_eval instead. Left in place to preserve behavior.
            return eval(value.decode('utf-8'))
        else:
            value = self.sql_db.get(key)
            self.set(key_corrector, value, self.timeout)
            return value

    def set(self, key, value, timeout):
        """Store ``value`` under ``key`` with a TTL of ``timeout`` seconds."""
        self.client.set(key, value, expire=timeout)
def execqueryWithMem(name, c1, c2): try: mysql_conn = mysql.connector.connect(host=host, user=dbusername, password=dbpassword, database=dbname, port=port) mc = Client(('cloudassgn.hw0lsb.0001.use2.cache.amazonaws.com', 11211)) query = 'select ' + name + ' from Earthquake where ' + c1 + ' and ' + c2 hashVal = hashlib.sha224(query).hexdigest() starttime = int(round(time.time() * 1000)) data = mc.get(hashVal) count = 0 if not data: for i in range(0, 250): cursor = mysql_conn.cursor() cursor.execute(query) row = cursor.fetchall() count = cursor.rowcount cursor.close() mc.set(hashVal, count) endtime = int(round(time.time() * 1000)) totalexectime = endtime - starttime mysql_conn.close() resultStr = '<div style="font-size:14px;margin-top: 30px;"><div> Last Name : Manakan </div><div> Last 4 digit ID : 6131 </div><div> Class Section : 10:30 AM </div></div>' resultStr = resultStr + '<br> Time taken : ' + str(totalexectime) + ' msecs' resultStr = resultStr + '<br> Rows effected : ' + str(count) return resultStr except Exception as e: print e return 'Error ' + str(e)
def __init__(self, counter, host, port=11211, socket_connect_timeout=1):
    """Open a read-only memcached connection; ``counter`` is a factory for
    the rate-tracking helper used by stats reporting."""
    if not host:
        raise ConfigurationError(
            'Memcached wrapper improperly configured. Valid memcached host is required!'
        )
    self.__con = Client((host, port))
    self._counter = counter('')
def __init__(self, log_):
    """Attach a logger and connect to local memcached with JSON serde."""
    try:
        self.log = log_
        self.client = Client(('localhost', 11211),
                             serializer=self.json_serializer,
                             deserializer=self.json_deserializer)
    except Exception as e:
        # Bug fix: Exception.message was removed in Python 3, so the
        # original error handler itself raised AttributeError; log the
        # exception object instead.
        log_.error('{0}'.format(e))
def set_test(request):
    """Django view: write a test value over the memcached unix socket.

    A string server argument makes pymemcache use an AF_UNIX socket.
    """
    client = Client(('/opt/www/memcached/memcached.sock'),
                    serializer=json_serializer,
                    deserializer=json_deserializer)
    client.set('new', {'status': 'updated'})
    return JsonResponse({'status': 'ok'})
def adduser(request):
    """Register a user: validate the POSTed JSON, stage the pending account
    in memcache keyed by email, and send a confirmation key by mail."""
    if request.method == 'POST':
        data = request.get_data()
        json_data = json.loads(data.decode('utf-8'))
        username = json_data.get('username')
        password = json_data.get('password')
        email = json_data.get('email')
        userid = ''.join([
            random.choice(string.ascii_letters + string.digits)
            for i in range(16)
        ])
        if username is None or password is None or email is None:
            return json.dumps({'status': 'error', "error": "have empty block"})
        key = ''.join([
            random.choice(string.ascii_letters + string.digits)
            for i in range(16)
        ])
        timestamp = int(time.time())
        res = dbcontroller.mysql_checkuser(username, email)
        if res == False:
            return json.dumps({
                'status': 'error',
                'error': 'email already register or username already in use'
            })
        # use memcache to stage the pending registration
        mc = Client((memhost, memport))
        # SECURITY NOTE(review): password is cached in plaintext here —
        # consider hashing before storage.
        info = {
            "username": username,
            "password": password,
            "email": email,
            "timestamp": timestamp,
            "key": key,
            "userid": userid
        }
        print(info)
        mc.set(email, info)
        if sendMail(email, key) == True:
            return json.dumps({'status': 'OK'})
        else:
            return json.dumps({
                'status': 'error',
                'error': 'invalid email address'
            })
    else:
        return json.dumps({'status': 'error'})
class Memcached(object):
    """Memcached based caching.

    This is extremely lightweight in terms of memory usage, but it appears
    to be slightly slower than local database caching, and a lot slower than
    local memory based caching. Use it when the cache must be shared between
    multiple users.
    """

    LRU = LeastRecentlyUsed = 0

    def __init__(self, url='127.0.0.1', mode=LRU, ttl=None):
        """Create a new engine.

        Parameters:
            url (str): Memcached server to connect to.
            mode (int): Purge strategy; memcached is LRU-only, so this only
                exists to match the other engines.
            ttl (int): Cache validity in seconds; None or 0 means infinite.
        """
        # Prefer pymemcache, fall back to python-memcached (different ctor).
        try:
            from pymemcache.client.base import Client
            self.client = Client(url, timeout=3, connect_timeout=3)
        except ImportError:
            from memcache import Client
            self.client = Client([url], socket_timeout=3)
        self.ttl = ttl

        # Don't allow cross version caches:
        # pickle may have incompatibilities between versions.
        self._key_prefix = '{}.{}.'.format(sys.version_info.major,
                                           sys.version_info.minor)

    def get(self, key):
        """Return the decoded value for ``key``.

        Raises CacheNotFound when the key is missing or expired.
        """
        value = self.client.get(self._key_prefix + key)
        if value is None:
            raise exceptions.CacheNotFound(key)
        return _decode(value)

    def put(self, key, value, ttl=None):
        """Write a new value, overwriting any old cache with the same key."""
        if ttl is None:
            ttl = self.ttl
        self.client.set(self._key_prefix + key, _encode(value),
                        ttl or 0, noreply=True)

    def delete(self, key):
        """Delete an item of cache if it exists."""
        return self.client.delete(self._key_prefix + key, noreply=False)
def plugin(srv, item): srv.logging.debug("*** MODULE=%s: service=%s, target=%s", __file__, item.service, item.target) if HAVE_SLACK == False: srv.logging.error("slacker module missing") return False token = item.config.get("token") if token is None: srv.logging.error("No token found for slack") return False try: channel_id, size_x, size_y, timespan, dir_to_save = item.addrs except: srv.logging.error("Incorrect target configuration") return False # make animated gif, save to local disk, upload to slack channel client = Client(("127.0.0.1", 11211)) images_original = [] images_resized = [] size = size_x, size_y cur_month = datetime.datetime.now().strftime("%Y%m") cur_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S") cur_dir = dir_to_save + "/" + cur_month if not os.path.exists(cur_dir): os.makedirs(cur_dir) cur_img_original = cur_dir + "/" + cur_time + ".gif" cur_img_resized = cur_dir + "/" + cur_time + "_s.gif" # make a gif file try: result = client.get("curno") if result: curno = int(result) for i in range((curno - timespan), curno): data = client.get(str(i)) if data: im = Image.open(StringIO.StringIO(data)) images_original.append(im) imresized = ImageOps.fit(im, size, Image.ANTIALIAS) images_resized.append(imresized) if len(images_original) > 0: images2gif.writeGif(cur_img_original, images_original, duration=0.2) images2gif.writeGif(cur_img_resized, images_resized, duration=0.2) except Exception, e: srv.logging.warning("Cannot make a gif: %s" % (str(e))) return False
def test_socket_close(self):
    """close() tears down the socket and clears the reference."""
    addr = ("example.com", 11211)
    client = Client(addr, socket_module=MockSocketModule())
    client._connect()
    assert client.sock is not None
    client.close()
    assert client.sock is None
def make_client(self, mock_socket_values, **kwargs):
    """Build a Client whose _connect() lazily installs a MockSocket.

    _connect is mocked rather than hard-setting client.sock so that code
    under test is forced to check whether self.sock is None before use.
    """
    client = Client(None, **kwargs)
    mock_sock = MockSocket(list(mock_socket_values))
    client._connect = mock.Mock(side_effect=functools.partial(
        setattr, client, "sock", mock_sock))
    return client
def test_socket_connect_closes_on_failure(self):
    """A failed connect attempt must close the socket it opened."""
    addr = ("example.com", 11211)
    socket_module = MockSocketModule(connect_failure=OSError())
    client = Client(addr, socket_module=socket_module)
    with pytest.raises(OSError):
        client._connect()
    assert len(socket_module.sockets) == 1
    assert socket_module.sockets[0].connections == []
    assert socket_module.sockets[0].closed
def test_socket_close_exception(self):
    """close() clears the socket even when the OS-level close raises."""
    addr = ("example.com", 11211)
    socket_module = MockSocketModule(close_failure=OSError())
    client = Client(addr, socket_module=socket_module)
    client._connect()
    assert client.sock is not None
    client.close()
    assert client.sock is None
class MemcachedCache(CachualCache):
    """A cache backed by `Memcached <https://memcached.org/>`_.

    The same caveats apply to keys and values as for Redis — store only
    strings (using the packing/unpacking functions). See the Keys and
    Values documentation on :class:`pymemcache.client.base.Client`.

    :type host: string
    :param host: The Memcached host to use for the cache.

    :type port: integer
    :param port: The port to use for the Memcached server.

    :type kwargs: dict
    :param kwargs: Any additional args to pass to the
                   :class:`CachualCache` constructor.
    """

    def __init__(self, host='localhost', port=11211, **kwargs):
        super(MemcachedCache, self).__init__(**kwargs)
        self.client = MemcachedClient((host, port))

    def get(self, key):
        """Get the value belonging to a key.

        :type key: string
        :param key: The cache key to get the value for.

        :returns: The value for the cache key, or None on cache miss.
        """
        return self.client.get(key)

    def put(self, key, value, ttl=None):
        """Put a value into the cache at the given key.

        :type key: string
        :param key: The cache key to use for the value.

        :param value: The value to store in the cache.

        :type ttl: integer
        :param ttl: Time-to-live in seconds; None means no expiry
                    (memcached's 0).
        """
        if ttl is None:
            ttl = 0
        self.client.set(key, value, expire=ttl)
class Memcached(object):
    """Memcached-backed message handler that retries commands forever on
    connection drops and other I/O errors."""

    DELAY = 0.5   # seconds to sleep between retries
    DEBUG = False

    def __init__(self, hostname, port, **params):
        self.mc = Client((hostname, port))

    def handle(self, topic, message):
        """Dispatch ``message['cmd']`` to the same-named method, retrying on
        connection loss or network errors."""
        if 'cmd' not in message:
            raise Exception("Bad message: no command")
        cmd = message['cmd']
        if not hasattr(self, cmd):
            raise Exception("Unknown command: " + cmd)
        retry = True
        while retry:
            retry = False
            try:
                getattr(self, cmd)(message)
            except MemcacheUnexpectedCloseError:
                # Server dropped dead - we'll retry
                retry = True
            except IOError:
                # Something network-related - retry
                retry = True
            if retry:
                time.sleep(self.DELAY)

    def set(self, message):
        """Store message['val'] under message['key'] with message['ttl'].

        When 'sbt' is set, the $UNIXTIME$ placeholder is replaced by
        now + message['uto'].
        NOTE(review): text is encoded to bytes before .replace() is called
        with str arguments — on Python 3 that raises TypeError; this looks
        like Python 2 code, confirm the target runtime.
        """
        text = message['val'].encode('utf-8')
        if message.get('sbt', None):
            purge_time = time.time() + message.get('uto', 0)
            text = text.replace('$UNIXTIME$', '%.6f' % purge_time)
        if self.DEBUG:
            print("Set {0}-{1}-{2}".format(message['key'].encode('utf-8'),
                                           text, int(message['ttl'])))
        self.mc.set(message['key'].encode('utf-8'), text, int(message['ttl']))

    def delete(self, message):
        """Remove message['key'] from the cache."""
        self.mc.delete(message['key'])
def __init__(self, timeline_rate_reserve=5, multi_proc_logger=None):
    """Seed rate-limit state from settings and refresh it immediately.

    ``timeline_rate_reserve`` is how many requests to hold back as a
    buffer against the API limits.
    """
    if multi_proc_logger:
        self.logger = multi_proc_logger
    else:
        self.logger = utils.MultiProcessCheckingLogger(module_logger)
    self.memcacheClient = MemCacheClient(
        (interns_settings.memcache_host, interns_settings.memcache_port)
    )
    self.timeline_rate_reserve = timeline_rate_reserve
    self.tl_total_reqs = interns_settings.twitter_timeline_requests
    self.tl_reqs_left = interns_settings.twitter_timeline_req_left
    self.tl_reqs_reset_time = interns_settings.twitter_timeline_reset_time
    self.update_limits()
def test_serialization_deserialization(host, port, socket_module):
    """A JSON serde pair round-trips a dict through set/get unchanged."""
    def _ser(key, value):
        return json.dumps(value).encode('ascii'), 1

    def _des(key, value, flags):
        if flags == 1:
            return json.loads(value.decode('ascii'))
        return value

    client = Client((host, port), serializer=_ser, deserializer=_des,
                    socket_module=socket_module)
    client.flush_all()
    payload = {'a': 'b', 'c': ['d']}
    client.set(b'key', payload)
    assert client.get(b'key') == payload
def make_client(self, mock_socket_values, **kwargs):
    """Build a key-prefixed Client wired to a MockSocket replaying the
    given values."""
    client = Client(None, key_prefix=b"xyz:", **kwargs)
    client.sock = MockSocket(list(mock_socket_values))
    return client
def make_client(self, mock_socket_values, **kwargs):
    """Build a PooledClient whose pool always dispenses one mocked Client."""
    backing = Client(None, **kwargs)
    backing.sock = MockSocket(list(mock_socket_values))
    pooled = PooledClient(None, **kwargs)
    pooled.client_pool = pool.ObjectPool(lambda: backing)
    return pooled
handleCommand("412") time.sleep(1) # check current input source no if handleCommand("47"): time.sleep(4) m, n = getscreenimage() return m, n #------------------------ #logging.basicConfig( # format='# %(levelname)s: %(message)s', # level=logging.DEBUG, #) client = Client(('127.0.0.1', 11211)) try: result = client.get('appletv') if result: sys.exit("time is not passed") else: print ("appletv is on") # get current channel and change input to HDMI2 # tv should be on m, n = getCHandHDMI2() if m != n: changeinout(m, n) else: handleCommand("20") client.set('appletv', 'true', 60)
def test_socket_connect_unix(self):
    """A string server path flips the client into unix-socket mode."""
    path = "/tmp/memcache"
    with MockUnixSocket(path):
        client = Client(path)
        assert client._use_unix_socket
        client._connect()
def test_socket_connect(self):
    """_connect honors the server address, timeouts, and TCP_NODELAY."""
    addr = ("example.com", 11211)

    client = Client(addr, socket_module=MockSocketModule())
    client._connect()
    assert client.sock.connections == [addr]

    timeout = 2
    connect_timeout = 3
    client = Client(addr, connect_timeout=connect_timeout, timeout=timeout,
                    socket_module=MockSocketModule())
    client._connect()
    assert client.sock.timeouts == [connect_timeout, timeout]

    # Default: no extra socket options.
    client = Client(addr, socket_module=MockSocketModule())
    client._connect()
    assert client.sock.socket_options == []

    # no_delay=True sets TCP_NODELAY.
    client = Client(addr, socket_module=MockSocketModule(), no_delay=True)
    client._connect()
    assert client.sock.socket_options == [
        (socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    ]
def make_client(self, values):
    """Build a Client wired to a MockSocket replaying ``values``."""
    client = Client(None)
    client.sock = MockSocket(list(values))
    return client
class TwitterLimits(object):
    """
    Checks against either the twitter API or memcached twitter API results
    for twitter API requests limits
    """

    def __init__(self, timeline_rate_reserve=5, multi_proc_logger=None):
        """
        The reserve arguments are how many requests to hold back on to
        leave some form of buffer in place with regards to API limits
        """
        if multi_proc_logger:
            self.logger = multi_proc_logger
        else:
            self.logger = utils.MultiProcessCheckingLogger(module_logger)
        self.memcacheClient = MemCacheClient(
            (interns_settings.memcache_host, interns_settings.memcache_port)
        )
        self.timeline_rate_reserve = timeline_rate_reserve
        self.tl_total_reqs = interns_settings.twitter_timeline_requests
        self.tl_reqs_left = interns_settings.twitter_timeline_req_left
        self.tl_reqs_reset_time = interns_settings.twitter_timeline_reset_time
        self.update_limits()

    def update_limits(self):
        """Refresh rate-limit numbers from memcache, falling back to a
        direct twitter API request when cached values are missing/stale."""
        self.logger.debug(__name__, 'Updating twitter limits')
        cached_total = self.memcacheClient.get('timeline_limit')
        if cached_total:
            self.tl_total_reqs = int(cached_total)
        cached_left = self.memcacheClient.get('timeline_remaining')
        if cached_left:
            self.tl_reqs_left = int(cached_left)
        cached_reset = self.memcacheClient.get('timeline_reset')
        if cached_reset:
            self.tl_reqs_reset_time = int(cached_reset)
            utc_now = datetime.utcnow()
            utc_secs = (utc_now - epoch_time).total_seconds()
            secs_until_reset = self.tl_reqs_reset_time - utc_secs
            if secs_until_reset <= 0:
                # Force getting rates from twitter
                self.tl_reqs_reset_time = None
        update_values_valid = (
            self.tl_total_reqs and
            (self.tl_reqs_left is not None) and
            self.tl_reqs_reset_time
        )
        if not update_values_valid:
            self.logger.debug(__name__, 'Making twitter API limits request')
            update_vals = twitterClient.get_user_timeline_rate_limit()
            self.tl_total_reqs = update_vals.limit
            self.tl_reqs_left = update_vals.remaining
            self.tl_reqs_reset_time = update_vals.reset
        else:
            self.logger.debug(
                __name__, 'Using twitter API limits from memcache'
            )
        self.logger.debug(
            __name__,
            'Total twitter timeline requests allowed is: {0}'.format(
                self.tl_total_reqs
            )
        )
        self.logger.debug(
            __name__,
            'Number of twitter timeline requests left is: {0}'.format(
                self.tl_reqs_left
            )
        )
        self.logger.debug(
            __name__,
            'Twitter timeline request reset time is: {0}'.format(
                self.tl_reqs_reset_time
            )
        )

    def get_sleep_between_jobs(self):
        """Spread the remaining (reserve-buffered) requests evenly over the
        time left until the API window resets; sleep out the whole window
        when the buffer is exhausted."""
        self.update_limits()
        self.logger.debug(__name__, 'Updating sleep time between jobs')
        utc_now = datetime.utcnow()
        utc_secs = (utc_now - epoch_time).total_seconds()
        self.logger.debug(__name__, 'UTC in seconds {0}'.format(utc_secs))
        secs_until_reset = self.tl_reqs_reset_time - utc_secs
        self.logger.debug(
            __name__, 'Seconds until API reset {0}'.format(secs_until_reset)
        )
        buffered_reqs_left = self.tl_reqs_left - self.timeline_rate_reserve
        self.logger.debug(
            __name__,
            'Buffered timeline requests left {0}'.format(buffered_reqs_left)
        )
        if buffered_reqs_left <= 0:
            sleep_time = secs_until_reset
        else:
            sleep_time = secs_until_reset / buffered_reqs_left
        self.logger.debug(__name__, 'Sleep time {0}'.format(sleep_time))
        return sleep_time
def __init__(self, host='localhost', port=11211, **kwargs):
    """Connect to memcached at ``host:port``; extra kwargs go to the
    parent cache constructor."""
    super(MemcachedCache, self).__init__(**kwargs)
    self.client = MemcachedClient((host, port))
def test_misc(client_class, host, port, socket_module):
    """Smoke test: connecting and flushing the server succeeds."""
    client = Client((host, port), socket_module=socket_module)
    client.flush_all()
def __init__(self, counter, host, port=11211, socket_connect_timeout=1):
    """Open a read-only memcached connection; ``counter`` is a factory
    for the rate-tracking helper used by stats reporting."""
    if not host:
        raise ConfigurationError('Memcached wrapper improperly configured. Valid memcached host is required!')
    self.__con = Client((host, port))
    self._counter = counter('')
def make_client(self, mock_socket_values, serializer=None):
    """Build a Client (optionally with a serializer) wired to a MockSocket."""
    client = Client(None, serializer=serializer)
    client.sock = MockSocket(list(mock_socket_values))
    return client
def make_client(self, mock_socket_values, serializer=None):
    """Build a key-prefixed PooledClient whose pool dispenses one mocked,
    equally-prefixed Client."""
    backing = Client(None, serializer=serializer, key_prefix=b'xyz:')
    backing.sock = MockSocket(list(mock_socket_values))
    pooled = PooledClient(None, serializer=serializer, key_prefix=b'xyz:')
    pooled.client_pool = pool.ObjectPool(lambda: backing)
    return pooled
#!/usr/bin/env python import sys import time import urllib2 from PIL import Image from pymemcache.client.base import Client client = Client(('127.0.0.1', 11211)) url = "http://127.0.0.1:5080/index1.jpg" i = 0 try: while True: try: input = urllib2.urlopen(url) input.readline(); input.readline() content_length = int(input.readline().split(": ")[1].strip()) input.readline(); input.readline() data = input.read(content_length) client.set(str(i), data, 30) client.set('curno', i) i = i + 1 time.sleep(0.7) except Exception, e: print e.__doc__ print e.message sys.exit(1) except KeyboardInterrupt:
def __init__(self, conf):
    """Initialize the reader and connect to the configured memcached."""
    super(Reader, self).__init__(conf)
    self.client = Client((self.conf['host'], self.conf['port']))
def gao_memcache(): client = Client((app.config['MEMCACHE_HOST'], app.config['MEMCACHE_PORT'])) client.set('some_key', 'some_value', expire=10) result = client.get('some_key') print result
def __init__(self, hostname, port, **params):
    """Open the memcached connection; extra params are accepted but unused."""
    self.mc = Client((hostname, port))