def get_picklist(listname):
    if uwsgi.cache_exists(listname):
        return pickle.loads(uwsgi.cache_get(listname))
    else:
        # three lists have special GETs, but eventually they will be a picklist
        if listname == "tumorType":
            # picklist_values['tumorType'] = {"cache_date": time.time(), "values": cache_oncotree()}
            uwsgi.cache_set(listname, pickle.dumps(cache_oncotree()), 900)
        elif listname == "Tag":
            # picklist_values['Tag'] = {"cache_date": time.time(), "values": cache_barcodes()}
            uwsgi.cache_set(listname, pickle.dumps(cache_barcodes()), 900)
            if uwsgi.cache_get(listname) is None:
                return cache_barcodes()
        elif listname == "Reads+Coverage":
            uwsgi.cache_set("Reads+Coverage", pickle.dumps(cache_reads_coverage()), 900)
        else:
            r = s.get(
                LIMS_API_ROOT + "/getPickListValues?list=%s" % listname,
                auth=(LIMS_USER, LIMS_PW),
                verify=False,
            )
            log_lims(r)
            picklist = []
            for value in json.loads(r.content.decode('utf-8')):
                picklist.append({"id": value, "value": value})
            uwsgi.cache_set(listname, pickle.dumps(picklist), 900)
        return pickle.loads(uwsgi.cache_get(listname))
def get(self):
    import uwsgi
    md5 = self.get_argument('md5', None)
    if md5 and md5 == uwsgi.cache_get(settings.ASSET_HASH_KEY):
        self.set_status(304)
        return
    scripts_content = uwsgi.cache_get(settings.ASSET_SCRIPTS_CONTENT_KEY)
    self.write(scripts_content)
def test_lru(self):
    self.assertTrue(uwsgi.cache_set('KEY1', 'X' * 20, 0, 'items_lru'))
    self.assertTrue(uwsgi.cache_set('KEY2', 'X' * 20, 0, 'items_lru'))
    self.assertTrue(uwsgi.cache_set('KEY3', 'Y' * 20, 0, 'items_lru'))
    self.assertIsNone(uwsgi.cache_get('KEY1', 'items_lru'))
    uwsgi.cache_get('KEY3', 'items_lru')
    for i in range(4, 100):
        self.assertTrue(uwsgi.cache_set('KEY%d' % i, 'Y' * 20, 0, 'items_lru'))
        self.assertIsNone(uwsgi.cache_get('KEY%d' % (i - 2), 'items_lru'))
def cache_get(self, key, cache_server=None):
    '''
    Get a value from the cache.

    key          : The cache key to read.
    cache_server : The UNIX/TCP socket where the cache portal2 is listening.
                   Optional.
    '''
    if cache_server is not None:
        return uwsgi.cache_get(key, cache_server)
    else:
        return uwsgi.cache_get(key)
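# A hedged usage sketch for the cache_get() method above. The enclosing class is
# not shown in the snippet, so `CachePortalClient` is a hypothetical name, and
# '127.0.0.1:22200' stands in for whatever socket the cache portal listens on.
client = CachePortalClient()
v_local = client.cache_get('mykey')                       # read from the local uWSGI cache
v_remote = client.cache_get('mykey', '127.0.0.1:22200')   # read via a remote cache socket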
def increment_request_count(user_id):
    worker_id = str(uwsgi.worker_id())
    if uwsgi.cache_get(worker_id):
        c = int(uwsgi.cache_get(worker_id))
        c += 1
        uwsgi.cache_update(worker_id, str(c))
    else:
        uwsgi.cache_set(worker_id, '0')
    return f"user_id:{user_id}:worker_id:{worker_id}:request_number:{uwsgi.cache_get(worker_id).decode()}"
def get(self):
    nlp_bytes = uwsgi.cache_get('nlp')
    if nlp_bytes or uwsgi.cache_get('busy'):
        if nlp_bytes:
            temp_nlp = pickle.loads(nlp_bytes)
            temp_nlp.close()
            uwsgi.cache_del('nlp')
            uwsgi.cache_del('busy')
            return 'success. closed.', 200
        else:
            uwsgi.cache_del('busy')
            return 'success', 200
    return 'Server already closed.', 304
def himawari8(target):
    last_updatetime = bottle.request.query.get("updatetime")
    getLayers = bottle.request.query.get("getLayers")
    https_verify = (bottle.request.query.get("https_verify") or "true").lower() == "true"
    baseUrl = bottle.request.url[0:bottle.request.url.find("/hi8")]
    if not getLayers.startswith("http"):
        getLayers = "{}{}".format(baseUrl, getLayers)
    key = "himawari8.{}".format(target)
    result = None
    getcaps = None
    if uwsgi.cache_exists("himawari8"):
        if uwsgi.cache_exists(key):
            result = json.loads(uwsgi.cache_get(key))
        else:
            getcaps = uwsgi.cache_get("himawari8").decode("utf-8")
    else:
        res = requests.get(getLayers, verify=https_verify)
        res.raise_for_status()
        getcaps = res.content
        uwsgi.cache_set("himawari8", getcaps, 60 * 10)  # cache for 10 mins
        getcaps = getcaps.decode("utf-8")
    if not result:
        layernames = re.findall(r"\w+HI8\w+{}\.\w+".format(target), getcaps)
        layers = []
        for layer in layernames:
            layers.append([
                settings.PERTH_TIMEZONE.localize(
                    datetime.datetime.strptime(
                        re.findall(r"\w+_(\d+)_\w+", layer)[0], "%Y%m%d%H%M")),
                layer
            ])
        layers = sorted(layers, key=lambda layer: layer[0])
        for layer in layers:
            layer[0] = layer[0].strftime("%a %b %d %Y %H:%M:%S AWST")
        result = {
            "servers": [baseUrl + FIREWATCH_SERVICE],
            "layers": layers,
            "updatetime": layers[-1][0]
        }
        uwsgi.cache_set(key, json.dumps(result), 60 * 10)  # cache for 10 mins
    if len(result["layers"]) == 0:
        return bottle.HTTPResponse(status=404)
    elif last_updatetime and last_updatetime == result["updatetime"]:
        bottle.response.status = 290
        return "{}"
    else:
        return result
def getAllTriggersAlarming():
    triggerCached = cache_get('triggerTelao', cache_options['triggerGet']['name'])
    if triggerCached:
        return json.loads(triggerCached)
    elif cache_get('updatingCache', cache_options['updates']['name']) == 'True':
        # another worker is refreshing the cache; wait for it to finish
        while cache_get('updatingCache', cache_options['updates']['name']) == 'True':
            time.sleep(0.3)
        # the triggers live in the 'triggerGet' cache, not the 'updates' cache
        return json.loads(
            cache_get('triggerTelao', cache_options['triggerGet']['name']))
    else:
        if cache_exists('updatingCache', cache_options['updates']['name']):
            cache_update('updatingCache', 'True',
                         cache_options['updates']['expiration_time'],
                         cache_options['updates']['name'])
        else:
            cache_set('updatingCache', 'True',
                      cache_options['updates']['expiration_time'],
                      cache_options['updates']['name'])
        admin = Admin()
        zbx_admin_token = admin.auth()
        triggers = fowardZ.sendToZabbix(
            method='trigger.get',
            params={
                'selectHosts': ["name"],
                'selectGroups': ['groups'],
                'selectLastEvent': ['lastEvent', 'acknowledged'],
                'expandComment': 1,
                'expandDescription': 1,
                'only_true': 1,
                'output': 'extend'
            },
            auth=zbx_admin_token)
        cache_set('triggerTelao', json.dumps(triggers),
                  cache_options['triggerGet']['expiration_time'],
                  cache_options['triggerGet']['name'])
        cache_update('updatingCache', 'False',
                     cache_options['updates']['expiration_time'],
                     cache_options['updates']['name'])
        return triggers
def demo_set():
    # demo = TestDemo()
    # print(str(demo))
    # print(sys.getsizeof(demo))
    demo = 'a' * 2000
    print('++++++++++++++++++++++++++')
    print('Size:', sys.getsizeof(demo))
    print('++++++++++++++++++++++++++')
    try:
        # for k, v in uwsgi.__dict__.iteritems():
        #     print('{}: {}'.format(k, v))
        # from remote_pdb import RemotePdb
        # RemotePdb('127.0.0.1', 4444).set_trace()
        # demo = uwsgi.cache_exists('demo')
        global num
        if num > 10:
            num = 0
        key = 'cache_{}'.format(num)
        demo = pickle.dumps(demo)
        uwsgi.cache_set(key, demo, 0, 'mycache')
        print(len(uwsgi.cache_get(key, 'mycache')))
        num += 1
        return str(num)
    except:
        import traceback
        traceback.print_exc()
        return 'not set'
def healthcheck():
    fe_status_cached = cache_get('gbix_healthcheck', cache_options['default']['name'])
    if fe_status_cached:
        return fe_status_cached
    else:
        try:
            fe_status = fowardZ.sendRequest('{"jsonrpc": "2.0", '
                                            '"method": "apiinfo.version", '
                                            '"params": [], '
                                            '"id": 1}')
        except:
            status_string = 'FAILED ' + socket.gethostname() + ' No connection to the Zabbix FE'
            cache_set('gbix_healthcheck', status_string, 3, cache_options['default']['name'])
            return status_string
        if 'result' in fe_status:
            status_string = 'WORKING ' + socket.gethostname()
            cache_set('gbix_healthcheck', status_string, 5, cache_options['default']['name'])
            return status_string
        else:
            status_string = 'FAILED ' + socket.gethostname() + ' No connection to the Zabbix FE'
            cache_set('gbix_healthcheck', status_string, 3, cache_options['default']['name'])
            return status_string
def methods():
    gbix_methods_cached = cache_get('/api/methods', cache_options['default']['name'])
    if not gbix_methods_cached:
        try:
            arq = os.path.join(
                os.path.split(os.path.split(os.path.realpath(__file__))[0])[0],
                'doc/api_data.json')
            f = open(arq)
        except IOError:
            return 'Methods file not found.'
        methods_json = str()
        pattern = re.compile(r'\s+"filename')
        for line in f.readlines():
            if re.match(pattern, line) is None:
                methods_json += line.strip().replace('<p>', '').replace(
                    '</p>', '').replace(' "', '"').replace('\\"', '')
        f.close()
        cache_set('/api/methods', methods_json, 0, cache_options['default']['name'])
        return methods_json
    else:
        return gbix_methods_cached
def get_best_score(game_data):
    bestscorecache = uwsgi.cache_get('bestscore')
    if bestscorecache is None:
        logger.debug('Load scores')
        scores = storage.get_top_scores(10)
        uwsgi.cache_update('bestscore', json.dumps(scores).encode('utf-8'))
        bestscorecache = uwsgi.cache_get('bestscore')
    scorehash = hashlib.md5(bestscorecache).hexdigest()
    if scorehash != game_data.get('bestscore', ''):
        logger.debug('Send new score to client')
        game_data['bestscore'] = scorehash
        return json.loads(bestscorecache.decode('utf-8'))
    return None
def memopml(opmlurl):
    if cache_exists(opmlurl):
        return cache_get(opmlurl)
    else:
        value = jsonld_from_opml(loaded_opml(unquote(opmlurl), repo).opml)
        cache_set(opmlurl, value, 3600 * 24)
        return value
def index():
    try:
        url = request.params['url']
    except KeyError:
        abort(400, 'Please provide a url')
        return  # only for pylint
    url_hash = hash(url)
    cache_key = str(url_hash)
    lock_idx = url_hash % LOCKS
    with lock(lock_idx):
        cache = uwsgi.cache_get(cache_key)
        if cache == 'd':
            return invalid()
        if cache == 'u':
            return url
        try:
            req = urllib2.Request(url, headers={'User-Agent': USERAGENT})
            urllib2.urlopen(req, timeout=10)
        except:
            uwsgi.cache_set(cache_key, 'd', EXPIRE)
            return invalid()
        else:
            uwsgi.cache_set(cache_key, 'u', EXPIRE)
            return url
def memfeed(feedurl):
    if cache_exists(feedurl):
        return cache_get(feedurl)
    else:
        value = jsonld_from_feedparser(loaded_feed(unquote(feedurl), repo).feed)
        cache_set(feedurl, value, 3600 * 24)
        return value
def wrapper(*args, **kwargs):
    wrapper.cfg = config_method(method)  # __wrapped__
    cache_id = wrapper.cfg.get(METHOD_CACHE_ID, None)
    cache_on = bool(cache_id)
    cache_expires = int(wrapper.cfg.get(METHOD_CACHE_EXPIRES, "0"))
    args_spec = inspect.getfullargspec(func).args
    if cache_key_name in kwargs:
        key_val = (kwargs[cache_key_name] if cache_key_name in args_spec
                   else kwargs.pop(cache_key_name))
    else:
        # if the key argument is missing, ValueError (from .index) or IndexError will be raised
        key_val = args[args_spec.index(cache_key_name)]
    if isinstance(key_val, dict):
        key_val = hash(tuple(sorted(key_val.items(), key=lambda item: item[0])))
    key_val = f'{method}:{str(key_val)}'
    if cache_on and uwsgi.cache_exists(key_val, cache_id):
        value = uwsgi.cache_get(key_val, cache_id)
        value = pickle.loads(value)
        logging.debug('cache [%s] -> %s : %s', cache_id, key_val, value)
        return value
    result = func(*args, **kwargs)
    if cache_on:
        value = pickle.dumps(result)
        uwsgi.cache_set(key_val, value, cache_expires, cache_id)
        logging.debug('cache [%s] <- %s expires %d : %s', cache_id, key_val, cache_expires, value)
    return result
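# A minimal, self-contained variant of the cache-aside pattern used by the
# wrapper above, written as a decorator. This is a sketch under assumptions:
# `uwsgi_cached` is not from the source, and `cache_id` must name a cache
# configured with uWSGI's --cache2 option.
import functools
import pickle
import uwsgi

def uwsgi_cached(cache_id, expires=300):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # derive a stable key from the function name and its arguments
            key = '{}:{}{}'.format(func.__name__, repr(args), repr(sorted(kwargs.items())))
            if uwsgi.cache_exists(key, cache_id):
                return pickle.loads(uwsgi.cache_get(key, cache_id))
            result = func(*args, **kwargs)
            uwsgi.cache_set(key, pickle.dumps(result), expires, cache_id)
            return result
        return wrapper
    return decorator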
def callback():
    startup.getUserToken(request.args.get('code'))
    if not uwsgi.cache_exists('isRunning'):
        app.logger.info("Creating new thread for refreshing spotify token and user stats.")
        uwsgi.cache_set('isRunning', 'True')
        uwsgi.cache_set('stop_threads', 'False')
        sp_t = spotify_thread(2500, "Thread-spotify")
        sp_t.start()
    try:
        if (uwsgi.cache_get('isRunning').decode('utf-8') == 'True'
                and uwsgi.cache_get('stop_threads').decode('utf-8') == 'True'):
            app.logger.info("Restarting the spotify application")
            uwsgi.cache_update('stop_threads', 'False')
    except AttributeError:
        app.logger.error(
            f"The isRunning or stop_threads variable is not initialised, values: "
            f"ir:{uwsgi.cache_get('isRunning')} and st:{uwsgi.cache_get('stop_threads')}")
    list_time_range = ['short_term', 'medium_term', 'long_term']
    list_type = ['artists', 'tracks']
    dict_index = {
        'short_term_artists': 1,
        'medium_term_artists': 2,
        'long_term_artists': 3,
        'short_term_tracks': 4,
        'medium_term_tracks': 5,
        'long_term_tracks': 6
    }
    for type in list_type:
        for time_range in list_time_range:
            set_analytics_data(
                dict_index[f"{time_range}_{type}"],
                json.dumps(json.loads(get_users_top(
                    startup.getAccessToken()[1], type, time_range))),
                time_range, type)
    app.logger.info(
        f"All the threads are listed below: {[thread.name for thread in threading.enumerate()]}")
    return redirect(url_for('project_spotify.spotify'))
def cache_get_sids(cache=''):
    # Get a list of current sids online, only for external use
    store = []
    for i in range(1, uwsgi.numproc + 1):
        data = uwsgi.cache_get(UWSGIManager.cache_worker_key % i, cache)
        if data is not None:
            store.extend(pickle.loads(data))
    return store
def test_big_random(self):
    blob = self.rand_blob(100000)
    self.assertTrue(uwsgi.cache_set('KEY', blob, 0, 'items_1_100000'))
    get_blob = uwsgi.cache_get('KEY', 'items_1_100000')
    self.assertEqual(blob, get_blob)
    self.assertTrue(uwsgi.cache_del('KEY', 'items_1_100000'))
    self.assertIsNone(uwsgi.cache_set('KEY', 'X' * 100001, 0, 'items_1_100000'))
    self.assertTrue(uwsgi.cache_set('KEY', 'X' * 10000, 0, 'items_1_100000'))
def projects():
    projects = get_all_projects()
    app.logger.info(
        f"In cache : isRunning : {uwsgi.cache_get('isRunning')} and "
        f"stop_threads : {uwsgi.cache_get('stop_threads')}")
    try:
        thread = (uwsgi.cache_get('stop_threads').decode("utf-8") == 'False'
                  and uwsgi.cache_get('isRunning').decode("utf-8") == 'True')
    except AttributeError:
        # cache_get returns None when the keys are missing
        thread = False
    return render_template('projects/projects.html', projects=projects, thread=thread)
def keep_alive(source_hash):
    data = {"source_hash": source_hash}
    reg_dump = cache_man.cache_get(source_hash)
    if reg_dump:
        cache_man.cache_update(source_hash, reg_dump, settings.DEFAULT_CACHE_TTL)
        return data, 200
    return data, 404
def get_value(key):
    value = None
    if UWSGI:
        value = uwsgi.cache_get(key)
        if value is not None:
            value = pickle.loads(value)
    else:
        if key in _cache:
            value = _cache[key]
    return value
def _cache_get_msg(self, worker_id):
    for msg_id in range(0, 10):
        msg_key = self.cache_msg_key % (worker_id, msg_id)
        msg = uwsgi.cache_get(msg_key, self.cache)
        if msg is not None:
            logger.debug('Get and send message from worker %s - %s' % (self.worker_id, msg_key))
            if worker_id:
                # delete message if worker_id is different from 0, else `short_cache_timeout` will do the job
                uwsgi.cache_del(msg_key, self.cache)
            yield msg
def check_status():
    cache_data = None
    user_id = request.args.get('user_id')
    target_user_id = request.args.get('target_user_id')
    key = f'{user_id}_{target_user_id}'
    if uwsgi.cache_exists(key):
        cache_data = uwsgi.cache_get(key).decode('utf-8')
    status = {'status': cache_data}
    return Response(json.dumps(status), mimetype='application/json')
def post(self):
    if uwsgi.cache_get('busy') == b'1':
        return "Server busy", 409
    args = parser.parse_args()
    return Response(
        nlp_module.run(
            args.debug, args.host, args.port, args.close, args.memory,
            args.input, args.output, args.arango, args.user, args.project,
            args.limit, args.pictures, args.summary, args.relations,
            args.corefs, args.newgraph, args.documentedges),
        mimetype='text/event-stream',
        headers={'X-Accel-Buffering': 'no'})
def process_request(json_in):
    uid = str(uuid.uuid4())
    json_in["id"] = uid
    uwsgi.queue_push(json.dumps(json_in))
    # Actual content of the message does not really matter;
    # this is just to trigger mule execution
    uwsgi.mule_msg("s")
    # busy-wait until the mule publishes the result under our request id
    while not uwsgi.cache_exists(uid, CACHE_NAME):
        continue
    res = uwsgi.cache_get(uid, CACHE_NAME)
    uwsgi.cache_del(uid, CACHE_NAME)
    return Response(response=res, status=200, mimetype="application/json")
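# A hedged sketch of the mule side of the protocol above: wake on mule_msg,
# pop the queued request, and publish the result under the request id so
# process_request() finds it. `handle` is a hypothetical application function;
# mule_get_msg() and queue_pop() are part of the uWSGI Python API.
import json
import uwsgi

def mule_loop():
    while True:
        uwsgi.mule_get_msg()      # block until a worker pings this mule
        raw = uwsgi.queue_pop()   # take the oldest queued request payload
        if not raw:
            continue
        job = json.loads(raw)
        result = handle(job)      # hypothetical worker function
        uwsgi.cache_set(job["id"], json.dumps(result), 0, CACHE_NAME)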
def invoke(source_hash, method_name, packed_data):
    data = rpc_data_unpack(packed_data)
    logging.debug(str(data))
    args = data["args"]
    kwargs = data["kwargs"]
    namespace = data["namespace"]
    settings = data["settings"]
    logging.debug(f"args: {args}")
    logging.debug(f"kwargs: {kwargs}")
    logging.debug(f"namespace: {namespace}")
    if not cache_man.cache_exists(source_hash):
        return {"error": source_hash}, 404
    reg_dump = pickle.loads(cache_man.cache_get(source_hash))
    if method_name not in reg_dump:
        return {"error": f"{source_hash}/{method_name}"}, 404
    fn_globals = build_namespace(reg_dump[method_name]["imports"], namespace)
    fn = reg_dump[method_name]["code"].make_fn(fn_globals)
    result = None
    std_stream_subst = io.StringIO()
    if settings.get("return_stdout"):
        sys.stdout = std_stream_subst
    if settings.get("return_stderr"):
        sys.stderr = std_stream_subst
    try:
        result = fn(*args, **kwargs)
    except Exception as e:
        logging.warning(f"Method {method_name} failed with exception {e.__class__.__name__}:")
        result = RpcRemoteException(e)
    except SystemExit as se:
        result = se.code
    finally:
        if settings.get("return_stdout") or settings.get("return_stderr"):
            if settings.get("return_stdout"):
                sys.stdout = sys.__stdout__
            if settings.get("return_stderr"):
                # restore stderr (the original mistakenly reassigned sys.stdout here)
                sys.stderr = sys.__stderr__
            std_stream_subst.seek(0)
            fn_std_all = std_stream_subst.readlines()
        else:
            fn_std_all = []
    return rpc_data_pack({"return": result, "fn_output": fn_std_all}), 200
def tell_me_if_im_going_to_die(lat, lon, meters):
    try:
        import uwsgi
        DEBUG_LEVEL = uwsgi.cache_get("DEBUG")
        if DEBUG_LEVEL is not None:
            return DEBUG_LEVEL
    except ImportError:
        pass
    lat, lon, meters = float(lat), float(lon), float(meters)
    response = get_crime_near(lat, lon, meters)
    level = process_crime_level(response)
    return str(level)
def himawari8(target):
    last_updatetime = bottle.request.query.get("updatetime")
    baseUrl = bottle.request.url[0:bottle.request.url.find("/hi8")]
    key = "himawari8.{}".format(target)
    result = None
    getcaps = None
    if uwsgi.cache_exists("himawari8"):
        if uwsgi.cache_exists(key):
            result = json.loads(uwsgi.cache_get(key))
        else:
            # the cache stores the decoded capabilities document; decode the bytes on read
            getcaps = uwsgi.cache_get("himawari8").decode("utf-8")
    else:
        res = requests.get("{}{}".format(baseUrl, FIREWATCH_GETCAPS), verify=FIREWATCH_HTTPS_VERIFY)
        res.raise_for_status()
        getcaps = res.content
        getcaps = getcaps.decode("utf-8")
        uwsgi.cache_set("himawari8", getcaps, 60 * 10)  # cache for 10 mins
    if not result:
        layernames = re.findall(r"\w+HI8\w+{}\.\w+".format(target), getcaps)
        layers = []
        for layer in layernames:
            layers.append([
                settings.PERTH_TIMEZONE.localize(
                    datetime.datetime.strptime(
                        re.findall(r"\w+_(\d+)_\w+", layer)[0], "%Y%m%d%H%M")),
                layer
            ])
        layers = sorted(layers, key=lambda layer: layer[0])
        for layer in layers:
            layer[0] = layer[0].strftime("%a %b %d %Y %H:%M:%S AWST")
        result = {
            "servers": [baseUrl + FIREWATCH_SERVICE],
            "layers": layers,
            "updatetime": layers[-1][0]
        }
        uwsgi.cache_set(key, json.dumps(result), 60 * 10)  # cache for 10 mins
    if len(result["layers"]) == 0:
        return bottle.HTTPResponse(status=404)
    elif last_updatetime and last_updatetime == result["updatetime"]:
        bottle.response.status = 290
        return "{}"
    else:
        return result
def application(env, start_response):
    if uwsgi.loop == 'gevent':
        gevent.sleep()
    start_response('200 OK', [('Content-Type', 'text/html')])
    yield "foobar<br/>"
    if uwsgi.loop == 'gevent':
        gevent.sleep(10)
    yield str(env['wsgi.input'].fileno())
    yield "<h1>Hello World</h1>"
    try:
        yield uwsgi.cache_get('foo')
    except:
        pass
def get_url(url):
    """Download a file from url to cache_dir."""
    # set a lock to prevent multiple simultaneous downloads of the same file
    mypid = os.getpid()
    uwsgi.lock()
    otherpid = uwsgi.cache_get(url)
    if otherpid:
        uwsgi.unlock()
        while otherpid:
            log('D: [%d] waiting for pid %s to download %s' % (mypid, otherpid, url))
            time.sleep(1)
            otherpid = uwsgi.cache_get(url)
        return 200
    else:
        uwsgi.cache_set(url, str(mypid))
        uwsgi.unlock()
    dest = localfile(url)
    log('D: [%d] downloading %s to %s' % (mypid, url, dest))
    curl = pycurl.Curl()
    curl.setopt(curl.URL, url)
    curl.setopt(curl.FOLLOWLOCATION, True)
    path = '/'.join(dest.split('/')[:-1])
    if not os.path.exists(path):
        # parallel download of rpms in subdir will create it right now
        try:
            os.makedirs(path)
        except OSError as e:
            # this catches duplicate creation (so just W not E)
            # TODO: need to bypass the open() on real errors
            # like permissions
            log('W: [%d] OS error(%d): %s' % (mypid, e.errno, e.strerror))
    with open(dest, 'wb') as fil:
        curl.setopt(curl.WRITEFUNCTION, fil.write)
        curl.perform()
    uwsgi.cache_del(url)
    return curl.getinfo(curl.HTTP_CODE)
def application(env, start_response):
    if uwsgi.loop == 'gevent':
        gevent.sleep()
    start_response('200 OK', [('Content-Type', 'text/html')])
    yield "foobar<br/>"
    if uwsgi.loop == 'gevent':
        gevent.sleep(3)
    yield str(env['wsgi.input'].fileno())
    yield "<h1>Hello World</h1>"
    try:
        yield uwsgi.cache_get('foo')
    except:
        pass
def get_url(url):
    """Download a file from url to cache_dir."""
    # set a lock to prevent multiple simultaneous downloads of the same
    # file
    mypid = os.getpid()
    uwsgi.lock()
    otherpid = uwsgi.cache_get(url)
    if otherpid:
        uwsgi.unlock()
        while otherpid:
            log('D: pid %d waiting for pid %s to download %s' % (mypid, otherpid, url))
            time.sleep(1)
            otherpid = uwsgi.cache_get(url)
        return 200
    else:
        uwsgi.cache_set(url, str(mypid))
        uwsgi.unlock()
    dest = localfile(url)
    log('D: pid %d downloading %s' % (mypid, url))
    curl = pycurl.Curl()
    curl.setopt(curl.URL, url)
    path = '/'.join(dest.split('/')[:-1])
    if not os.path.exists(path):
        # parallel download of rpms in subdir will create it right now
        try:
            os.makedirs(path)
        except OSError as e:
            # this catches duplicate creation (so just W not E)
            # TODO: need to bypass the open() on real errors
            # like permissions
            log('W: OS error(%d): %s' % (e.errno, e.strerror))
    with open(dest, 'wb') as fil:
        curl.setopt(curl.WRITEFUNCTION, fil.write)
        curl.perform()
    uwsgi.cache_del(url)
    return curl.getinfo(curl.HTTP_CODE)
# Python 2 snippet (print statement)
def application(env, start_response):
    print env["wsgi.input"].read()
    if uwsgi.loop == "gevent":
        gevent.sleep()
    start_response("200 OK", [("Content-Type", "text/html")])
    yield "foobar<br/>"
    if uwsgi.loop == "gevent":
        gevent.sleep(3)
    yield str(env["wsgi.input"].fileno())
    yield "<h1>Hello World</h1>"
    try:
        yield uwsgi.cache_get("foo")
    except:
        pass
def sighandler(signum):
    now = int(time.time())
    key = 'scheduler_call_time_signal_' + str(signum)
    uwsgi.lock()
    try:
        updating = uwsgi.cache_get(key)
        if updating is not None:
            updating = int.from_bytes(updating, 'big')
            if now - updating < delay:
                return
        uwsgi.cache_update(key, now.to_bytes(4, 'big'))
    finally:
        uwsgi.unlock()
    func(*args)
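# How a handler like sighandler might be wired up -- a sketch under assumptions:
# the signal number (17) and the 10-second period are arbitrary choices here,
# and sighandler must already carry its func/args/delay closure from the
# enclosing scope. register_signal() and add_timer() are standard uWSGI APIs.
import uwsgi

uwsgi.register_signal(17, 'worker', sighandler)  # deliver uwsgi signal 17 to a worker
uwsgi.add_timer(17, 10)                          # raise signal 17 every 10 seconds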
def objects(cls):
    if not uwsgi.cache_exists("raw_aws"):
        if hasattr(cls, "_objects"):
            del cls._objects
        uwsgi.cache_set(
            "raw_aws",
            subprocess.check_output(["aws", "ec2", "describe-instances", "--no-paginate"]),
            60 * 15)
    raw = json.loads(uwsgi.cache_get("raw_aws").decode("utf-8"))
    if hasattr(cls, "_objects"):
        return cls._objects
    objects = {}
    for data in raw["Reservations"]:
        for instance_data in data["Instances"]:
            instance = Instance(instance_data=instance_data)
            objects[instance.instance_id] = instance
    cls._objects = objects
    return objects  # A dict
def register(source_hash, method_name, packed_data):
    logging.info("{}: {}".format(request.method, request.url))
    data = rpc_data_unpack(request.get_data())
    logging.debug(str(data))
    fn_data = {method_name: data}
    if cache_man.cache_exists(source_hash):
        reg_dump = pickle.loads(cache_man.cache_get(source_hash))
        reg_dump.update(fn_data)
        cache_man.cache_update(source_hash, pickle.dumps(reg_dump), settings.DEFAULT_CACHE_TTL)
    else:
        cache_man.cache_set(source_hash, pickle.dumps(fn_data), settings.DEFAULT_CACHE_TTL)
def _cache_worker_id(self, sid):
    """
    Get worker_id from sid else return 0.

    :type sid: str
    :rtype: int
    """
    if sid in self.sids:
        return self._worker_id
    wid = 0
    for i in (i for i in range(1, uwsgi.numproc + 1) if i != self._worker_id):
        store = pickle.loads(uwsgi.cache_get(self.cache_worker_key % i, self.cache))
        if sid in store:
            wid = i
            break
    return wid
def himawari8(target):
    if uwsgi.cache_exists("himawari8"):
        getcaps = uwsgi.cache_get("himawari8")
    else:
        getcaps = requests.get(FIREWATCH_GETCAPS).content
        uwsgi.cache_set("himawari8", getcaps, 60 * 10)  # cache for 10 mins
    getcaps = getcaps.decode("utf-8")
    layernames = re.findall(r"\w+HI8\w+{}\.\w+".format(target), getcaps)
    layers = []
    for layer in layernames:
        layers.append([
            FIREWATCH_TZ.localize(
                datetime.strptime(
                    re.findall(r"\w+_(\d+)_\w+", layer)[0], "%Y%m%d%H%M")).isoformat(),
            layer
        ])
    result = {
        "servers": [FIREWATCH_SERVICE],
        "layers": layers
    }
    return result
def bom(target):
    last_updatetime = bottle.request.query.get("updatetime")
    layeridpattern = bottle.request.query.get("layeridpattern")
    if layeridpattern:
        layeridpattern = "bom:{}".format(layeridpattern)
    else:
        layeridpattern = "bom:{}{{:0>3}}".format(target)
    current_timeline = None
    try:
        current_timeline = json.loads(uwsgi.cache_get(target))
    except:
        current_timeline = None
    bottle.response.set_header("Content-Type", "application/json")
    bottle.response.status = 200
    if current_timeline and datetime.datetime.now() - datetime.datetime.strptime(
            current_timeline["refreshtime"], "%a %b %d %Y %H:%M:%S") < datetime.timedelta(minutes=5):
        # the data was refreshed within the last 5 minutes, use the cached result directly
        if current_timeline["updatetime"] == last_updatetime:
            # returning 304 causes a "No element found" error, so return a custom
            # status code carrying the same meaning as 304
            bottle.response.status = 290
            return "{}"
        else:
            return {
                "layers": current_timeline["layers"],
                "updatetime": current_timeline["updatetime"]
            }
    timeline = getTimelineFromWmsLayer(current_timeline, bomLayerIdFunc(layeridpattern))
    if not timeline:
        # raising a bare string is invalid; raise a proper exception instead
        raise Exception("Missing some of http parameters 'basetimelayer', 'timelinesize', 'layertimespan'.")
    if not current_timeline or id(timeline) != id(current_timeline):
        uwsgi.cache_set(target, json.dumps(timeline), 0)
    if timeline["updatetime"] == last_updatetime:
        bottle.response.status = 290
        return "{}"
    else:
        return {
            "layers": timeline["layers"],
            "updatetime": timeline["updatetime"]
        }
def get_tests():
    """Retrieve test execution status upon successful GET."""
    if "receipt" not in request.args:
        return make_response(
            jsonify({"error": "Required 'receipt' parameter not found."}), 400)
    receipt = request.args.get("receipt")
    if not receipt:
        return make_response(
            jsonify({"error": "Required 'receipt' parameter found with an empty value."}), 400)
    test_status = uwsgi.cache_get(receipt, "receipts")
    if test_status is None:
        return make_response(
            jsonify({"error": "Provided 'receipt' not found."}), 404)
    return jsonify(json.loads(test_status))
def get_container_ip(client, container_name):
    cache_key = '{}_{}_IP'.format(client.base_url, container_name)
    return (uwsgi.cache_get(cache_key)
            or _update_container_ip(client, container_name, cache_key))
# Python 2 snippet (basestring, dict.iteritems)
def get_layerdefinition(layerids, kmiserver="https://kmi.dbca.wa.gov.au/geoserver", results={}):
    kmiserver = get_kmiserver(kmiserver)
    multiple_layers = True
    if isinstance(layerids, basestring):
        layerids = [layerids]
        multiple_layers = False
    # group layers against layer workspace
    layers = {}
    for layerid in layerids:
        layerid = layerid.strip()
        # check whether it is cached or not
        key = layerdefinitionkey(layerid)
        if uwsgi.cache_exists(key):
            try:
                definitiondata = uwsgi.cache_get(key)
                if definitiondata:
                    if layerid in results:
                        results[layerid].update(json.loads(definitiondata))
                    else:
                        results[layerid] = json.loads(definitiondata)
                    continue
            except:
                pass
        layer = layerid.split(":")
        if len(layer) == 1:
            # no workspace
            layer_ws = ""
            layer = layer[0]
        else:
            layer_ws = layer[0]
            layer = layer[1]
        if layer_ws not in layers:
            layers[layer_ws] = [layerid]
        else:
            layers[layer_ws].append(layerid)
    if layers:
        kmiserver = get_kmiserver(kmiserver)
        session_cookie = settings.get_session_cookie()
        url = None
        for layer_ws, layers in layers.iteritems():
            # NOTE: the original format string had no placeholder after 'typeName=',
            # so the requested type names were silently dropped; '{}' restores them.
            if layer_ws:
                url = "{}/{}/wfs?request=DescribeFeatureType&version=2.0.0&service=WFS&outputFormat=application%2Fjson&typeName={}".format(kmiserver, layer_ws, ",".join(layers))
            else:
                url = "{}/wfs?request=DescribeFeatureType&version=2.0.0&service=WFS&outputFormat=application%2Fjson&typeName={}".format(kmiserver, ",".join(layers))
            res = requests.get(url, verify=False, cookies=session_cookie)
            res.raise_for_status()
            layersdata = res.json()
            for layer in layersdata.get("featureTypes") or []:
                if layer_ws:
                    layerid = "{}:{}".format(layer_ws, layer["typeName"])
                else:
                    layerid = layer["typeName"]
                try:
                    index = layers.index(layerid)
                except:
                    index = -1
                if index >= 0:
                    # this layer's metadata is requested by the user
                    if layerid in results:
                        result = results[layerid]
                    else:
                        result = {"id": layerid}
                        results[layerid] = result
                    result["properties"] = layer["properties"]
                    result["geometry_property"] = None
                    result["geometry_properties"] = []
                    result["geometry_type"] = None
                    result["geometry_property_msg"] = None
                    del layers[index]
                    # find spatial columns
                    for prop in layer["properties"]:
                        if prop["type"].startswith("gml:"):
                            # spatial column
                            result["geometry_properties"].append(prop)
                    if len(result["geometry_properties"]) == 1:
                        result["geometry_property"] = result["geometry_properties"][0]
                        result["geometry_type"] = result["geometry_properties"][0]["localType"].lower()
                    elif len(result["geometry_properties"]) > 1:
                        # more than one geometry property; try to find the right one
                        if layer_ws:
                            url = "{}/{}/ows?service=WFS&version=2.0.0&request=GetFeature&typeName={}&count=1&outputFormat=application%2Fjson".format(kmiserver, layer_ws, layerid)
                        else:
                            url = "{}/ows?service=WFS&version=2.0.0&request=GetFeature&typeName={}&count=1&outputFormat=application%2Fjson".format(kmiserver, layerid)
                        res = requests.get(url, verify=False, cookies=session_cookie)
                        res.raise_for_status()
                        featuresdata = res.json()
                        if len(featuresdata["features"]) > 0:
                            feat = featuresdata["features"][0]
                            for prop in result["geometry_properties"]:
                                if prop["name"] == feat["geometry_name"]:
                                    result["geometry_property"] = prop
                                    result["geometry_type"] = prop["localType"].lower()
                                    break
                        if not result["geometry_property"]:
                            result["geometry_property_msg"] = "Layer '{}' has more than one geometry column, can't identify which column is used as the geometry column.".format(layerid)
                    else:
                        result["geometry_property_msg"] = "Layer '{}' is not a spatial layer".format(layerid)
                    if result["geometry_property"]:
                        # found the geometry property, remove it from properties
                        index = len(result["properties"]) - 1
                        while index >= 0:
                            if result["properties"][index] == result["geometry_property"]:
                                # this is the geometry property, remove it from properties
                                del result["properties"][index]
                                break
                            index -= 1
                    # cache it for 1 day
                    key = layerdefinitionkey(layerid)
                    try:
                        if uwsgi.cache_exists(key):
                            uwsgi.cache_update(key, json.dumps(result), 24 * 3600)
                        else:
                            uwsgi.cache_set(key, json.dumps(result), 24 * 3600)
                    except:
                        pass
            if len(layers) == 1:
                if layer_ws:
                    raise Exception("The layer({}:{}) Not Found".format(layer_ws, layers[0]))
                else:
                    raise Exception("The layer({}) Not Found".format(layers[0]))
            elif len(layers) > 1:
                if layer_ws:
                    raise Exception("The layers({}) Not Found".format(",".join(["{}:{}".format(layer_ws, l) for l in layers])))
                else:
                    raise Exception("The layers({}) Not Found".format(",".join(layers)))
    if multiple_layers:
        return results
    else:
        return results[layerids[0]]
def get_layermetadata(layerids, kmiserver="https://kmi.dbca.wa.gov.au/geoserver", results={}):
    multiple_layers = True
    if isinstance(layerids, basestring):
        layerids = [layerids]
        multiple_layers = False
    # group layers against layer workspace
    layers = {}
    for layerid in layerids:
        layerid = layerid.strip()
        # check whether it is cached or not
        key = layermetadatakey(layerid)
        if uwsgi.cache_exists(key):
            try:
                metadata = uwsgi.cache_get(key)
                if metadata:
                    if layerid in results:
                        results[layerid].update(json.loads(metadata))
                    else:
                        results[layerid] = json.loads(metadata)
                    # print("Retrieve the metadata from cache for layer ({})".format(layerid))
                    continue
            except:
                pass
        layer = layerid.split(":")
        if len(layer) == 1:
            # no workspace
            layer_ws = ""
            layer = layer[0]
        else:
            layer_ws = layer[0]
            layer = layer[1]
        if layer_ws not in layers:
            layers[layer_ws] = [layer]
        else:
            layers[layer_ws].append(layer)
    if layers:
        session_cookie = settings.get_session_cookie()
        kmiserver = get_kmiserver(kmiserver)
        # find the layer's metadata
        url = None
        for layer_ws, layers in layers.iteritems():
            if layer_ws:
                url = "{}/{}/wms?service=wms&version=1.1.1&request=GetCapabilities".format(kmiserver, layer_ws)
            else:
                url = "{}/wms?service=wms&version=1.1.1&request=GetCapabilities".format(kmiserver)
            res = requests.get(url, verify=False, cookies=session_cookie)
            res.raise_for_status()
            tree = ET.fromstring(res.content)
            capability = tree.find('Capability')
            if not len(capability):
                raise Exception("getCapability failed")
            kmi_layers = capability.findall("Layer")
            while kmi_layers:
                kmi_layer = kmi_layers.pop()
                name = get_child_value(kmi_layer, "Name")
                if name:
                    try:
                        index = layers.index(name)
                    except:
                        index = -1
                    if index >= 0:
                        # this layer's metadata is requested by the user
                        if layer_ws:
                            layerid = "{}:{}".format(layer_ws, name)
                        else:
                            layerid = name
                        if layerid in results:
                            result = results[layerid]
                        else:
                            result = {"id": layerid}
                            results[layerid] = result
                        del layers[index]
                        result["title"] = get_child_value(kmi_layer, "Title")
                        result["abstract"] = get_child_value(kmi_layer, "Abstract")
                        result["srs"] = get_child_value(kmi_layer, "SRS")
                        bbox = kmi_layer.find("LatLonBoundingBox")
                        if bbox is not None:
                            result["latlonBoundingBox"] = [
                                float(bbox.attrib["miny"]), float(bbox.attrib["minx"]),
                                float(bbox.attrib["maxy"]), float(bbox.attrib["maxx"])]
                        else:
                            result["latlonBoundingBox"] = None
                        for bbox in kmi_layer.findall("BoundingBox"):
                            result["latlonBoundingBox_{}".format(bbox.attrib["SRS"].upper())] = [
                                float(bbox.attrib["miny"]), float(bbox.attrib["minx"]),
                                float(bbox.attrib["maxy"]), float(bbox.attrib["maxx"])]
                        # cache it for 6 hours
                        key = layermetadatakey(result["id"])
                        try:
                            if uwsgi.cache_exists(key):
                                uwsgi.cache_update(key, json.dumps(result), 6 * 3600)
                            else:
                                uwsgi.cache_set(key, json.dumps(result), 6 * 3600)
                        except:
                            pass
                        # print("Retrieve the metadata from kmi for layer ({})".format(result["id"]))
                        if len(layers):
                            continue
                        else:
                            # already found metadata for all required layers
                            break
                sub_layers = kmi_layer.findall("Layer")
                if sub_layers:
                    kmi_layers += sub_layers
            if len(layers) == 1:
                if layer_ws:
                    raise Exception("The layer({}:{}) Not Found".format(layer_ws, layers[0]))
                else:
                    raise Exception("The layer({}) Not Found".format(layers[0]))
            elif len(layers) > 1:
                if layer_ws:
                    raise Exception("The layers({}) Not Found".format(",".join(["{}:{}".format(layer_ws, l) for l in layers])))
                else:
                    raise Exception("The layers({}) Not Found".format(",".join(layers)))
    if multiple_layers:
        return results
    else:
        return results[layerids[0]]
def hello_timer(num):
    i = int(uwsgi.cache_get('Service2Timer'))
    i += 1
    uwsgi.cache_update('Service2Timer', str(i))
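# hello_timer() assumes the 'Service2Timer' key already holds a number; a
# hedged initialisation sketch (the key name comes from the snippet above,
# the seeding itself is an assumption):
import uwsgi

if not uwsgi.cache_exists('Service2Timer'):
    uwsgi.cache_set('Service2Timer', '0')  # seed so int(uwsgi.cache_get(...)) never sees None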
def _get_profile(app):
    # get app profile
    profile = None
    appPath = os.path.join(DIST_PATH, "{}.js".format(app))
    if not os.path.exists(appPath):
        appPath = os.path.join(DIST_PATH, "sss.js")
    key = "{}_profile".format(app)
    profileChanged = False
    if uwsgi.cache_exists(key):
        profile = uwsgi.cache_get(key)
        if profile:
            profile = json.loads(profile)
            if repr(os.path.getmtime(appPath)) != profile["mtime"] or os.path.getsize(appPath) != profile["size"]:
                profileChanged = True
                profile = None
    if not profile:
        file_data = None
        with open(appPath, "rb") as f:
            file_data = f.read()
        m = profile_re.search(file_data)
        profile = m.group("profile") if m else "{}"
        profile = {
            'mtime': repr(os.path.getmtime(appPath)),
            'size': os.path.getsize(appPath),
            'profile': demjson.decode(profile)
        }
        m = hashlib.md5()
        m.update(file_data)
        profile['profile']['build']['md5'] = base64.urlsafe_b64encode(m.digest()).rstrip("=")
        file_data = None
        if profileChanged:
            uwsgi.cache_update(key, json.dumps(profile))
        else:
            uwsgi.cache_set(key, json.dumps(profile))
    profile["profile"]["dependents"] = {}

    # get vendor md5
    vendorPath = os.path.join(DIST_PATH, "vendor.js")
    if not os.path.exists(vendorPath):
        raise Exception("Vendor library not found")
    key = "{}_profile".format("vendor")
    profileChanged = False
    vendorProfile = None
    if uwsgi.cache_exists(key):
        vendorProfile = uwsgi.cache_get(key)
        if vendorProfile:
            vendorProfile = json.loads(vendorProfile)
            if repr(os.path.getmtime(vendorPath)) != vendorProfile["mtime"] or os.path.getsize(vendorPath) != vendorProfile["size"]:
                profileChanged = True
                vendorProfile = None
    if not vendorProfile:
        m = hashlib.md5()
        with open(vendorPath, "rb") as f:
            m.update(f.read())
        vendorProfile = {
            'mtime': repr(os.path.getmtime(vendorPath)),
            'size': os.path.getsize(vendorPath),
            'vendorMD5': base64.urlsafe_b64encode(m.digest()).rstrip("=")
        }
        if profileChanged:
            uwsgi.cache_update(key, json.dumps(vendorProfile))
        else:
            uwsgi.cache_set(key, json.dumps(vendorProfile))
    profile["profile"]["dependents"]["vendorMD5"] = vendorProfile["vendorMD5"]

    # get env profile
    envPath = os.path.join(BASE_DIST_PATH, 'release', 'static', 'js', "{}-{}.env.js".format(app, ENV_TYPE))
    if not os.path.exists(envPath):
        raise Exception("'{}-{}.env.js' is missing.".format(app, ENV_TYPE))
    else:
        key = "{}_{}_profile".format("env", ENV_TYPE)
        profileChanged = False
        envProfile = None
        if uwsgi.cache_exists(key):
            envProfile = uwsgi.cache_get(key)
            if envProfile:
                envProfile = json.loads(envProfile)
                if repr(os.path.getmtime(envPath)) != envProfile["mtime"] or os.path.getsize(envPath) != envProfile["size"]:
                    profileChanged = True
                    envProfile = None
        if not envProfile:
            m = hashlib.md5()
            with open(envPath, "rb") as f:
                m.update(f.read())
            envProfile = {
                'mtime': repr(os.path.getmtime(envPath)),
                'size': os.path.getsize(envPath),
                'envMD5': base64.urlsafe_b64encode(m.digest()).rstrip("=")
            }
            if profileChanged:
                uwsgi.cache_update(key, json.dumps(envProfile))
            else:
                uwsgi.cache_set(key, json.dumps(envProfile))
        profile["profile"]["dependents"]["envMD5"] = envProfile["envMD5"]
        profile["profile"]["envType"] = ENV_TYPE

    # get style profile
    stylePath = os.path.join(BASE_DIST_PATH, 'release', 'static', 'css', "style.css")
    if not os.path.exists(stylePath):
        raise Exception("'style.css' is missing.")
    else:
        key = "style_profile"
        profileChanged = False
        styleProfile = None
        if uwsgi.cache_exists(key):
            styleProfile = uwsgi.cache_get(key)
            if styleProfile:
                styleProfile = json.loads(styleProfile)
                if repr(os.path.getmtime(stylePath)) != styleProfile["mtime"] or os.path.getsize(stylePath) != styleProfile["size"]:
                    profileChanged = True
                    styleProfile = None
        if not styleProfile:
            m = hashlib.md5()
            with open(stylePath, "rb") as f:
                m.update(f.read())
            styleProfile = {
                'mtime': repr(os.path.getmtime(stylePath)),
                'size': os.path.getsize(stylePath),
                'styleMD5': base64.urlsafe_b64encode(m.digest()).rstrip("=")
            }
            if profileChanged:
                uwsgi.cache_update(key, json.dumps(styleProfile))
            else:
                uwsgi.cache_set(key, json.dumps(styleProfile))
        profile["profile"]["dependents"]["styleMD5"] = styleProfile["styleMD5"]
    return profile["profile"]
def fetch(cls, time):
    routes = uwsgi.cache_get('binds')
    if routes:
        cls.routes = loads(routes)
        cls.update = time
def _cache_get_tenant_id(token):
    return uwsgi.cache_get(token, _CONFIG.cache.cache_name)
def get(self, cache, key):
    return uwsgi.cache_get(key, cache)
def __getitem__(self, item):
    return uwsgi.cache_get(item)
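# A hedged sketch showing how the __getitem__ wrapper above can grow into a
# small dict-like facade over the default uWSGI cache. The class name and the
# __setitem__/__contains__ methods are illustrative additions, not from the source.
import uwsgi

class UwsgiCacheDict(object):
    def __getitem__(self, item):
        return uwsgi.cache_get(item)

    def __setitem__(self, item, value):
        uwsgi.cache_update(item, value)  # cache_update also creates missing keys

    def __contains__(self, item):
        return bool(uwsgi.cache_exists(item))

cache = UwsgiCacheDict()
cache['greeting'] = b'hello'
print(cache['greeting'])  # b'hello' -- uwsgi.cache_get returns bytes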
# Python 2 uWSGI cache stress-test script (string.letters, print statement)
import random
import string

import uwsgi

items = {}

def gen_rand_n(max_n):
    return random.randint(8, max_n)

def gen_rand_s(size):
    return ''.join([random.choice(string.letters) for i in range(size)])

print 'filling cache...'
for i in range(0, 1000):
    kl = gen_rand_n(200)
    key = gen_rand_s(kl)
    vl = gen_rand_n(10000)
    val = gen_rand_s(vl)
    items[key] = val
    uwsgi.cache_set(key, val)

print 'checking cache...'
count = 0
for key in items.keys():
    val = uwsgi.cache_get(key)
    count += 1
    if val != items[key]:
        print len(val), val
        print len(items[key]), items[key]
        raise Exception('CACHE TEST FAILED AFTER %d ITERATIONS !!!' % count)

print "TEST PASSED"
def counter(self, cookie, request, response):
    i = int(uwsgi.cache_get('Service2Counter'))
    i += 1
    uwsgi.cache_update('Service2Counter', str(i))
    return "{0} {1}".format(i, uwsgi.cache_get('Service2Timer'))
def screenx_cache_get(k):
    if UWSGI_ENV:
        v = int(uwsgi.cache_get(k))
    else:
        v = SCREENX_CACHE.get(k)
    return v
def get(self, key):
    '''
    Looks up the value of the given key.

    Returns the value if the key exists, None otherwise.
    '''
    return uwsgi.cache_get(key)
def _cached_token_exists(token):
    return uwsgi.cache_get(token, _CONFIG.cache.cache_name) is not None