def __init__(self, addr, port):
    """Set up the listening TCP socket, translation API config and cache clients.

    Args:
        addr: interface address to bind the server socket to.
        port: TCP port to listen on.
    """
    import os  # function-scope import keeps this fix self-contained

    self.socket_server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.socket_server.bind((addr, port))
    # Allow up to 5 pending connections before new ones are refused.
    self.socket_server.listen(5)
    # SECURITY FIX: the API key was hard-coded in source. Prefer the
    # YANDEX_API_KEY environment variable; the old literal remains only as a
    # backward-compatible fallback and should be rotated.
    self.api_key = os.environ.get(
        "YANDEX_API_KEY",
        "trnsl.1.1.20200407T132632Z.19b1f0ccbc9d05c9.e82289b798de7ce9ec5d52e568274deb2ba54216",
    )
    self.url = "https://translate.yandex.net/api/v1.5/tr.json/translate"
    # Two clients against the same memcached instance: the "old" one ignores
    # cache errors so the fallback chain degrades gracefully.
    self.old_cache = base.Client(('memcached', 11211), ignore_exc=True)
    self.new_cache = base.Client(('memcached', 11211))
    self.cache_client = fallback.FallbackClient(
        (self.new_cache, self.old_cache))
def main():
    """Smoke-test a memcached server: miss, set/get round-trip, and expiry."""
    host = os.environ['MEMCACHED_HOST']
    port = int(os.environ['MEMCACHED_PORT'])
    mc_client = membase.Client((host, port))

    print('Fetching key test_key:')
    val = mc_client.get('test_key')
    if val is not None:
        print(f'Got weird value! Expected null, got {val}')
        sys.exit(1)

    # Store a small JSON payload with a short TTL.
    original = {'foo': 7}
    original_json = json.dumps(original)
    original_bytes = original_json.encode('utf-8')
    print(f'Setting key test_key to: {original_bytes} (2 second expire time)')
    mc_client.set('test_key', original_bytes, expire=2)

    print('Fetching key test_key:')
    val = mc_client.get('test_key')
    if val != original_bytes:
        print(f'Got weird value! Expected {original_bytes}, got {val}')
        sys.exit(1)

    print('Sleeping 2.2 seconds to allow expiration')
    time.sleep(2.2)

    # After the TTL the key must be gone again.
    print('Fetching key test_key:')
    val = mc_client.get('test_key')
    if val is not None:
        print(f'Got weird value! Expected null, got {val}')
        sys.exit(1)

    print('All tests passed')
def fetch_status_from_api_and_update(awbno):
    """Fetch the latest tracking events for *awbno* and sync them to storage.

    Queries the wowexpress tracking API, caches the most recent status in
    memcached under the AWB number, and appends any scan events the stored
    Shipment document does not yet have.

    Args:
        awbno: air-waybill number to track.
    """
    url = 'https://wowship.wowexpress.in/index.php/api/detailed_status/trackAwb'
    headers = {'Content-Type': "application/json", 'Cache-Control': "no-cache"}
    param = {'awb': awbno}
    resp = requests.request("GET", url, headers=headers,
                            params=urllib.parse.urlencode(param))
    if resp.status_code == SUCCESS:
        resp = resp.json()[0]
        if 'scan_detail' in resp:
            # Cache the most recent status for quick lookups.
            _client = base.Client(('localhost', 11211))
            _client.set(awbno, resp['scan_detail'][-1]['status'])
            shipment_obj = Shipment.objects(
                awbno=awbno).only('details').first()
            known = len(shipment_obj['details'])
            fetched = len(resp['scan_detail'])
            # BUG FIX: the original assigned past the end of the details
            # list (details[known + i] = ...), which raises IndexError;
            # append the new events instead.
            for event in resp['scan_detail'][known:fetched]:
                shipment_obj.details.append(event)
            if fetched > known:
                shipment_obj.save()
def __call__(self, request):
    """Middleware entry point.

    For requests carrying the "OBCIAZNIK" header, runs the request/response
    hooks and attaches CPU/memory diagnostics as response headers; all other
    requests pass straight through.
    """
    if "OBCIAZNIK" not in request.headers:
        return self.get_response(request)

    self.process_request(request)
    response = self.process_response(request, self.get_response(request))

    # Point-in-time CPU usage, JSON-encoded with a timestamp.
    response['cpu_usage_current'] = json.dumps(
        {
            'usage': psutil.cpu_percent(),
            'timestamp': datetime.now()
        },
        default=str)
    cpu_times = psutil.cpu_times()
    response['cpu_time_spent_user'] = cpu_times.user
    response['cpu_time_spent_system'] = cpu_times.system
    response['cpu_time_spent_idle'] = cpu_times.idle
    # Percent of virtual memory in use.
    response['memory_usage'] = psutil.virtual_memory()[2]
    response['container_id'] = socket.gethostbyname(socket.gethostname())

    # Aggregated CPU usage comes from memcached only when explicitly enabled.
    if os.getenv('RUN_MEMCACHE') == 'TRUE':
        client = base.Client(('localhost', 11211))
        response['cpu_usage_aggregated'] = client.get('CPU_USAGE')
    return response
def __init__(self, hostname='0.0.0.0', port=11211):
    """Create a memcached-backed cache bound to *hostname*:*port*."""
    BaseCache.__init__(self)
    # Keep the endpoint around for debugging/reconnection.
    self._hostname, self._port = hostname, port
    self._client = base.Client((self._hostname, self._port))
def gravar():
    """Persist the flight list ("voos") from the request body in the volatile store.

    Returns:
        "Ok" on success. BUG FIX: an empty/missing JSON body used to fall
        through and return None (which Flask turns into a 500); it now
        returns an explicit 400 response.
    """
    voos = request.get_json()
    if not voos:
        return "Nenhum dado recebido", 400
    # NOTE(review): plain pymemcache clients only accept str/bytes values —
    # presumably a serializer is configured elsewhere; verify against setup.
    client = base.Client((BANCO_VOLATIL, 11211))
    client.set("voos", voos)
    return "Ok"
def getDatapoint(self, request, context):
    """gRPC handler: look up request.k in memcached and return it as a Value.

    Returns:
        rpc.Value wrapping the cached bytes, or an empty Value when the key
        is missing or the cache is unreachable.
    """
    client = base.Client(('memcached', 11211))
    try:
        result = client.get(request.k)
        return rpc.Value(v=result)
    # BUG FIX: the bare `except:` also swallowed SystemExit and
    # KeyboardInterrupt; catch only real errors.
    except Exception:
        return rpc.Value(v='')
def __init__(self, host, port, cacheExpirationTime):
    """
    Variables for client, backup Clients, fallbackClient instances, cache
    Expiration Time and old Client Cache
    """
    # Primary cache connection.
    self.client = base.Client((host, port))
    self.cacheExpirationTime = cacheExpirationTime
    # The fallback chain and the previous client are wired up lazily
    # elsewhere, so they start out empty.
    self.fallbackClient = None
    self.backupClients = None
    self.oldClient = None
def __enter__(self):
    """Open a memcached connection using pickle-compatible (de)serializers."""
    endpoint = (self.host, self.port)
    self.connection = base.Client(
        endpoint,
        serializer=serde.python_memcache_serializer,
        deserializer=serde.python_memcache_deserializer,
    )
    return self.connection
def create_secret(mdict):
    """
    Generate a one-time secret (OTP) for a verified user and cache it.

    The secret is stored in memcached under the user's phone number for 90
    seconds so the next request can verify it (verify_response()).

    Returns:
        The 6-digit secret string on success, 'no such user exists' for an
        unknown phone number, and 'failure' for unverified users or errors.
    """
    client = base.Client(('localhost', 11211))
    try:
        exists = User_reg.objects.filter(
            phone_number=mdict['phone_number']).count()
        if exists == 0:
            return 'no such user exists'
        nu = User_reg.objects.get(phone_number=mdict['phone_number'])
        if not nu.isVerified:
            print('user has not been verified')
            return 'failure'
        # Six-digit OTP derived from a UUID.
        secret = str(uuid.uuid1().int % 1000000)
        print('otp is ' + secret)
        # 90-second expiry window for verification.
        client.set(mdict['phone_number'], secret, 90)
        print(client.get(mdict['phone_number']))
        return secret
    # BUG FIX: the bare `except:` also caught SystemExit/KeyboardInterrupt,
    # and an unreachable `return 0` trailed the function; both removed.
    except Exception:
        print('fail exception occured')
        return 'failure'
def generatePlaylist():
    """Build an XSPF document for the playlist id given in the query string.

    The upstream playlist-service response is cached in memcached for two
    minutes. BUG FIX: the original stored the raw dict on a miss (which the
    plain client cannot serialize) and treated the cached bytes as a dict on
    a hit; the payload is now JSON-encoded on write and decoded on read so
    cache hits actually work.
    """
    import json  # function-scope import keeps this fix self-contained

    id = request.args.get('id')
    x = xspf.Xspf()
    client = base.Client(('localhost', 11211))
    cached = client.get('playlist.' + id)
    if cached is None:
        playlist = requests.get('http://127.0.0.1:5300/v1/playlists/' +
                                str(id)).json()
        # Cache the result for next time: JSON text, 2-minute TTL.
        client.set('playlist.' + id, json.dumps(playlist), expire=120)
    else:
        playlist = json.loads(cached)
    x.title = playlist.get('title')
    x.info = playlist.get('playlist_description')
    x.creator = playlist.get('creator')
    tracks = playlist.get('tracks')
    if tracks:
        for track in tracks:
            x.add_track(title=track.get('title'),
                        creator=track.get('artist'),
                        album=track.get('album_title'),
                        location=track.get('media_url'))
    xml = x.toXml()
    return Response(xml, mimetype='audio/xspf')
async def anticaptcha_cache_get_handle(request):
    """
    GET
    task_id - task id from AntiCaptcha
    Response json - {'taskId': <task_id>, 'message': {<solve_json>}
    or
    {'taskId': <task_id>, 'message': {"taskId": <task_id>, "status": "processing"}
    """
    # get task id from request
    task_id = request.match_info["task_id"]
    # init client
    client = base.Client(("localhost", 11211), connect_timeout=10)
    # BUG FIX: close the connection even when the lookup raises.
    try:
        cache_data = client.get(task_id)
    finally:
        client.close()
    if cache_data:
        # solved: cached payload is JSON-encoded bytes
        data = {"taskId": task_id, "message": json.loads(cache_data.decode())}
    else:
        # not solved yet
        data = {
            "taskId": task_id,
            "message": {"taskId": task_id, "status": "processing"},
        }
    return web.json_response(data)
def save_rucaptcha_result_cache(message: dict):
    """Cache a solved RuCaptcha code under its task id for one hour."""
    cache = base.Client(("localhost", 11211))
    task_id, code = message.get("id"), message.get("code")
    cache.set(task_id, code, expire=3600)
    cache.close()
async def rucaptcha_cache_get_handle(request):
    """
    GET
    task_id - task id from RuCaptcha
    Response json - {'id': <task_id>, 'code': <solve_code>}
    or
    {'id': <task_id>, 'code': "CAPCHA_NOT_READY"}
    """
    # get task id from request
    task_id = request.match_info["task_id"]
    # init client
    client = base.Client(("localhost", 11211), connect_timeout=10)
    # BUG FIX: close the connection even when the lookup raises.
    try:
        cache_data = client.get(task_id)
    finally:
        client.close()
    if cache_data:
        # solved: cached value is the solve code as bytes
        data = {"id": task_id, "code": cache_data.decode()}
    else:
        # not solved yet
        data = {"id": task_id, "code": "CAPCHA_NOT_READY"}
    return web.json_response(data)
def save_anticaptcha_result_cache(message: dict):
    """Cache a solved AntiCaptcha payload (JSON-encoded) under its taskId for one hour."""
    cache = base.Client(("localhost", 11211))
    payload = json.dumps(message)
    cache.set(message.get("taskId"), payload, expire=3600)
    cache.close()
def playlist(id):
    """Render playlist *id* (with its tracks and descriptions) as XSPF.

    BUG FIX: the original cached the raw requests.Response object (which the
    plain memcached client cannot store) and then re-fetched the playlist on
    a cache hit anyway, so the cache never worked. The playlist JSON body is
    now cached as text for 60s and reused on hits.
    """
    import json  # function-scope import keeps this fix self-contained

    # run memcached
    client = base.Client(('localhost', 11211))
    playlist_url = "http://localhost:8000/playlists/" + str(id)
    cached = client.get(str(id))
    if cached is None:
        resp = requests.get(playlist_url)
        client.set(str(id), resp.text, 60)  # cache the JSON body
        playlists = resp.json()
    else:
        playlists = json.loads(cached)
    track_ids = playlists[0]['tracks']
    get_track = []
    for track_id in track_ids:
        track = requests.get("http://localhost:8000/tracks/" +
                             str(track_id)).json()[0]
        track['description'] = requests.get(
            "http://localhost:8000/descriptions/" + str(track_id)).json()[0]
        get_track.append(track)
    # renders xspf template file
    template = render_template('playlist.xspf',
                               playlist=playlists,
                               tracks=get_track)
    response = make_response(template)
    response.headers['Content-Type'] = 'application/xspf'
    return response
def read_sensorvalues_from_cache():
    """
    This function responds to a request for /api/sonicsensor
    :return: all sensor values and timestamp

    Refactor: the four copy-pasted sensor entries are built in a loop; the
    returned structure (a one-element list with Sensor1..Sensor4 keys) is
    unchanged.
    """
    client = base.Client((properties.memcacheHost, properties.memcachePort))
    values = {}
    for n in range(1, 5):
        values["Sensor%d" % n] = {
            # NOTE(review): a missing cache key makes int(None) raise
            # TypeError — same as the original behavior.
            "distance": int(client.get('sensor%d' % n)),
            "timestamp": client.get('sensor%d_timestamp' % n),
        }
    return [values]
def __init__(self):
    """Initialise Firebase, grab Firestore collection handles and the local cache."""
    firebase_admin.initialize_app()
    db = firestore.client()
    self.db = db
    # Collection handles used throughout the service.
    self.status = db.collection(u'status')
    self.conf = db.collection(u'conf')
    self.mission = db.collection(u'mission')
    # Local memcached instance for fast, transient state.
    self.client = base.Client(('localhost', 11211))
def memcached_server():
    """Build a pymemcache client from the MEMCACHED_SERVER environment variable.

    Accepts "SERVER" or "SERVER:PORT". Returns None when the variable is
    unset, malformed, the port is not numeric, or pymemcache is unavailable.
    """
    import os

    # the server must be set in the environment
    config = os.environ.get("MEMCACHED_SERVER", None)
    if config is None:
        return None
    # expect either SERVER or SERVER:PORT entries
    parts = config.split(":")
    if len(parts) == 1:
        parts.append("11211")  # the default memcached port
    # we need the server and the port
    if len(parts) != 2:
        return None
    host, port = parts
    # BUG FIX: the port from "SERVER:PORT" was passed on as a string, which
    # socket.connect rejects — convert it, bailing out when non-numeric.
    try:
        port = int(port)
    except ValueError:
        return None
    try:
        from pymemcache.client import base
    except ImportError:
        return None
    # wait 1 second to try to connect, it's not worthwhile as a cache if it's slow
    return base.Client((host, port), connect_timeout=1, timeout=1)
def index():
    """Handle weather-station POSTs: derive a compass direction from the
    wind bearing and cache the rounded readings in memcached.

    BUG FIX: bearings of 293-336 degrees used to map to the typo "Nw"; they
    now yield "NW". The sector table also closes the fractional-degree gaps
    the original integer range chain left between sectors.
    """
    # POST request
    if request.method == 'POST':
        client = base.Client(('127.0.0.1', 11211))
        data = request.get_json()
        print("Incoming...")
        windBearing = data["winddir"]
        # Upper bound of each compass sector, scanned in order; 337-360
        # wraps back to north.
        sectors = [(22, "N"), (67, "NE"), (112, "E"), (157, "SE"),
                   (202, "S"), (247, "SW"), (292, "W"), (336, "NW"),
                   (360, "N")]
        windDirection = "N"  # default for out-of-range bearings
        for upper, name in sectors:
            if windBearing <= upper:
                windDirection = name
                break
        client.set('windDir', windDirection)
        client.set('windSpd', round(data["windspeedmph"]))
        client.set('windSpdGust', round(data["windgustmph"]))
        client.set('temp', round(data["tempc"]))
        client.set('humidity', round(data["humidity"]))
        return 'OK', 200
def context_analysis(post):
    # Purpose: match a post's text against the "predefined groups" cached in
    # memcached, printing every group whose (stopword-filtered) name appears
    # in the stopword-filtered, lower-cased token stream of the post.
    client = base.Client(('localhost', 11211))
    stop = stopwords.words('english')
    articles = [post]
    # Key '0' is assumed to hold the serialized list of all groups —
    # TODO confirm against the writer side.
    predefined_groups = client.get('0')
    # '''getting all the groups from cache provided all the groups are in cache. '''
    # NOTE(review): this strips the b'...' markers out of str(bytes) instead
    # of decoding the bytes properly; fragile, preserved as-is. The second
    # identical .replace is redundant but kept byte-for-byte.
    k = str(predefined_groups).replace("b\'", '').replace("\'", "").replace("\'", "")
    # Reconstruct the Python list from its repr text.
    res = ast.literal_eval(k)
    # print(articles[0])
    for i in articles:
        # Tokenize the post's visible text (text_from_html strips markup).
        word_tokens_post = word_tokenize(text_from_html(i))
        for j in range(len(res)):
            w_token = word_tokenize(str(res[j]))
            # Drop English stopwords from the group name, in place.
            filter_grp = [a for a in w_token if a not in stop]
            res[j] = ' '.join(filter_grp)
        #print(res)
        #lmtzr = WordNetLemmatizer()
        #group_list=str()
        #print('yesss')
        # Same filtering for the post itself, then lower-case and flatten to
        # one string so substring matching works.
        filtered_sentence = [a for a in word_tokens_post if a not in stop]
        filtered_sentence = [x.lower() for x in filtered_sentence]
        filtered_sentence = ' '.join(filtered_sentence)
        # Print every group whose filtered name occurs in the post text.
        for j in range(len(res)):
            if res[j].lower() in filtered_sentence:
                print(str(res[j]))
def CacheClassifier():
    """Train an SVM on all extracted reviews and cache the pickled model.

    Returns:
        (classifier, global_features_index, trainings_size) on success,
        None when training or caching fails.
    """
    reviews = Review.objects.filter(extracted=True)
    trainings_size = reviews.count()
    global_features_index = extract_features.GetGlobalFeaturesIndex()
    # Build the training matrix: one global feature vector per review,
    # labelled with the review's tag id.
    x, y = [], []
    for r in reviews:
        features_vector = extract_features.GetFeaturesVector(r)
        global_features_vector = extract_features.FeaturesVectorToGlobal(
            features_vector, global_features_index)
        x.append(global_features_vector)
        y.append(r.tag.tagId)
    classifier = svm.SVC(decision_function_shape='ovo')
    try:
        classifier.fit(x, y)
        client = base.Client(('localhost', 11211))
        model = (classifier, global_features_index, trainings_size)
        client.set(SVM_Model_KEY, pickle.dumps(model))
    # BUG FIX: the bare `except:` also swallowed SystemExit and
    # KeyboardInterrupt; still best-effort — a failed fit or cache write
    # yields None.
    except Exception:
        return None
    return model
def check_spam(user_id):
    """Report the most repeated URL and phone number in a user's cached posts.

    Reads the newline-delimited JSON blob cached under str(user_id), pulls
    links and phone numbers out of every post description, and prints the
    most common of each (heavy repetition suggests spam).

    BUG FIXES: removed the dead commented-out block and the stray trailing
    ''' (an unterminated string literal), and guarded most_common(1)[0],
    which raised IndexError when no links/phones were found.
    """
    client = base.Client(('localhost', 11211))
    json_data = client.get(str(user_id))
    # The cached blob is newline-separated JSON objects; stitch it into a
    # single JSON array (keeping the original null -> "nothing" substitution).
    str_data = "".join(chr(x) for x in bytearray(json_data))
    str_data = '[' + str_data.replace('\n', ',').replace('null', '"nothing"') + ']'
    str_data = str_data.replace(',]', ']')
    j_obj = json.loads(str_data)
    # Alternation of the supported phone-number layouts.
    phone_pattern = "|".join([
        "\\+\\d\\d?-? ?\\d{3}-\\d{3}-\\d{4}",
        "\\+\\d\\d?-? ?\\d{10}",
        "\\+\\d\\d?-? ?\\d{5} \\d{5}",
        "\\+\\d\\d?-? ?\\d{3}\\-? ?\\d{7}",
        "\\+\\d\\d?-? ?\\d{4}\\-? ?\\d{6}",
    ])
    link_list = []
    phone_list = []
    for post in j_obj:
        link_list.extend(re.findall(r'(https?://\S+)', post['description']))
        phone_list.extend(re.findall(phone_pattern, post['description']))
    print(link_list)
    print(phone_list)
    if link_list:
        most_common, num_most_common = Counter(link_list).most_common(1)[0]
        print(str(most_common) + '-->' + str(num_most_common))
    if phone_list:
        most_common, num_most_common = Counter(phone_list).most_common(1)[0]
        print(str(most_common) + '-->' + str(num_most_common))
def setup_class(self):
    """
    Set up: connect client to server
    """
    # Serde round-trips native Python values through memcached.
    self.mc = base.Client(
        ('localhost', 11211),
        serializer=serde.python_memcache_serializer,
        deserializer=serde.python_memcache_deserializer,
    )
def __init__(self):
    """Load IOTA node/seed config, prepare an address generator and cache client."""
    config = ConfigParser()
    config.read(os.path.join(os.path.dirname(__file__), 'config/config.txt'))
    # IOTA API client driven by the configured node and seed.
    self.iota = Iota(config.get('IOTA', 'node'), config.get('IOTA', 'seed'))
    self.generator = AddressGenerator(self.iota.seed)
    self.match_making_addr = self.generator.get_addresses(1)
    # Local memcached instance.
    self.memcached = base.Client(('127.0.0.1', 11211))
def check_memcache():
    """Round-trip a small JSON payload through memcached as a health check."""
    cache = base.Client((Config.memcache_host, Config.memcache_port))
    messages = ["hello, one", "hello, two", "hello, three"]
    cache.set('messages', json.dumps(messages))
    stored_messages = json.loads(cache.get('messages'))
    print(stored_messages)
    print('MEMCACHE [ok]')
def save_cache(key, value):
    """Best-effort write of *key* -> *value* into memcached.

    Failures are logged and swallowed: the cache is an optimization, not a
    requirement.
    """
    try:
        log("cache - save key: " + key)
        client = base.Client((config.MemcacheServer, config.MemcachePort))
        # 5 min expiration time
        client.set(key, value, config.MemcacheExpireTime)
    # BUG FIX: narrowed the bare `except:` (it also caught SystemExit /
    # KeyboardInterrupt) and record the actual error in the log line
    # instead of discarding it.
    except Exception as exc:
        log("cache - problem with saving cache: " + str(exc))
def _get_connection(self):
    """[Get connection]

    Builds a memcached client for self.host:self.port with a 5-second
    connect timeout. Note pymemcache connects lazily, so most network
    errors surface on first use rather than here.

    Raises:
        Exception: when the client cannot be constructed.
    """
    try:
        connection = base.Client((self.host, self.port), connect_timeout=5)
    # BUG FIX: chain the original error so the root cause isn't lost.
    except Exception as exc:
        raise Exception("Cannot connect to memcached") from exc
    return connection
def get_FilterSystem(guid, app):
    """return a FilterSystem corresponding to a guid

    BUG FIX: a cache miss used to feed None into pickle.loads, raising a
    cryptic TypeError; it now raises an explicit LookupError.

    NOTE(review): unpickling cache contents is only safe while the cache is
    fully trusted — pickle can execute arbitrary code.
    """
    mc = base.Client(('localhost', 11211))
    fs_str = mc.get(guid)
    if fs_str is None:
        raise LookupError("no FilterSystem cached for guid %r" % (guid,))
    fs = pickle.loads(fs_str)
    return fs
def get_session(session_id):
    """Load a session dict from memcached by *session_id*.

    Returns:
        The parsed session (stored as a Python-literal string), or the raw
        falsy cache result (None) when the id is unknown.
    """
    cache_client = base.Client((MEMCACHE_IP, MEMCACHE_PORT))
    # BUG FIX: the connection was only closed on a cache hit; always close.
    try:
        session = cache_client.get(session_id)
    finally:
        cache_client.close()
    if session:
        session = session.decode("utf-8")
        # Sessions are stored as repr()'d literals; literal_eval parses them
        # without executing code.
        session = ast.literal_eval(session)
    return session