def cache_exists(self, key, cache_server=None):
    '''
    Quickly check whether there is a value in the cache associated with the given key.

    key : The cache key to check.
    cache_server : The UNIX/TCP socket where the cache server is listening. Optional.
    '''
    if cache_server is not None:
        return uwsgi.cache_exists(key, cache_server)
    else:
        return uwsgi.cache_exists(key)
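# A short usage sketch for the wrapper above; the UwsgiCacheClient class name
# and the remote socket address are illustrative assumptions, not part of the
# original snippet.
client = UwsgiCacheClient()
if client.cache_exists('session:42'):
    print('found in the local uWSGI cache')
elif client.cache_exists('session:42', cache_server='127.0.0.1:3031'):
    print('found on a remote uWSGI cache server')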
def __init__(self):
    ServiceBase.__init__(self)
    ProfileMixin.__init__(self)
    DispatcherMixin_CRR.__init__(self)
    if not uwsgi.cache_exists('Service2Counter'):
        uwsgi.cache_set('Service2Counter', '0')
    if not uwsgi.cache_exists('Service2Timer'):
        uwsgi.cache_set('Service2Timer', '0')
    print uwsgi.queue_size
    gevent.spawn(microtask, uwsgi.worker_id())
    print 'after gevent.spawn'
def himawari8(target):
    last_updatetime = bottle.request.query.get("updatetime")
    getLayers = bottle.request.query.get("getLayers")
    https_verify = (bottle.request.query.get("https_verify") or "true").lower() == "true"
    baseUrl = bottle.request.url[0:bottle.request.url.find("/hi8")]
    if not getLayers.startswith("http"):
        getLayers = "{}{}".format(baseUrl, getLayers)
    key = "himawari8.{}".format(target)
    result = None
    getcaps = None
    if uwsgi.cache_exists("himawari8"):
        if uwsgi.cache_exists(key):
            result = json.loads(uwsgi.cache_get(key))
        else:
            getcaps = uwsgi.cache_get("himawari8").decode("utf-8")
    else:
        res = requests.get(getLayers, verify=https_verify)
        res.raise_for_status()
        getcaps = res.content
        uwsgi.cache_set("himawari8", getcaps, 60 * 10)  # cache for 10 mins
        getcaps = getcaps.decode("utf-8")
    if not result:
        layernames = re.findall(r"\w+HI8\w+{}\.\w+".format(target), getcaps)
        layers = []
        for layer in layernames:
            layers.append([
                settings.PERTH_TIMEZONE.localize(
                    datetime.datetime.strptime(
                        re.findall(r"\w+_(\d+)_\w+", layer)[0], "%Y%m%d%H%M")),
                layer
            ])
        layers = sorted(layers, key=lambda layer: layer[0])
        for layer in layers:
            layer[0] = layer[0].strftime("%a %b %d %Y %H:%M:%S AWST")
        result = {
            "servers": [baseUrl + FIREWATCH_SERVICE],
            "layers": layers,
            "updatetime": layers[-1][0]
        }
        uwsgi.cache_set(key, json.dumps(result), 60 * 10)  # cache for 10 mins
    if len(result["layers"]) == 0:
        return bottle.HTTPResponse(status=404)
    elif last_updatetime and last_updatetime == result["updatetime"]:
        bottle.response.status = 290
        return "{}"
    else:
        return result
def memrefresh(feedurl):
    value = jsonld_from_feedparser(loaded_feed(unquote(feedurl), repo, True).feed)
    if cache_exists(feedurl):
        cache_update(feedurl, value, 3600 * 24)
    else:
        cache_set(feedurl, value, 3600 * 24)
    return value

def memfeed(feedurl):
    if cache_exists(feedurl):
        return cache_get(feedurl)
    else:
        value = jsonld_from_feedparser(loaded_feed(unquote(feedurl), repo).feed)
        cache_set(feedurl, value, 3600 * 24)
        return value

def memopml(opmlurl):
    if cache_exists(opmlurl):
        return cache_get(opmlurl)
    else:
        value = jsonld_from_opml(loaded_opml(unquote(opmlurl), repo).opml)
        cache_set(opmlurl, value, 3600 * 24)
        return value
def wrapper(*args, **kwargs):
    wrapper.cfg = config_method(method)
    cache_id = wrapper.cfg.get(METHOD_CACHE_ID, None)
    cache_on = bool(cache_id)
    cache_expires = int(wrapper.cfg.get(METHOD_CACHE_EXPIRES, "0"))
    args_spec = inspect.getfullargspec(func).args
    if cache_key_name in kwargs:
        # keep the key in kwargs only if the wrapped function declares it
        key_val = (kwargs[cache_key_name] if cache_key_name in args_spec
                   else kwargs.pop(cache_key_name))
    else:
        # ValueError is raised if cache_key_name is not a declared argument
        key_val = args[args_spec.index(cache_key_name)]
    if isinstance(key_val, dict):
        key_val = hash(tuple(sorted(key_val.items(), key=lambda item: item[0])))
    key_val = f'{method}:{str(key_val)}'
    if cache_on and uwsgi.cache_exists(key_val, cache_id):
        value = uwsgi.cache_get(key_val, cache_id)
        value = pickle.loads(value)
        logging.debug('cache [%s] -> %s : %s', cache_id, key_val, value)
        return value
    result = func(*args, **kwargs)
    if cache_on:
        value = pickle.dumps(result)
        uwsgi.cache_set(key_val, value, cache_expires, cache_id)
        logging.debug('cache [%s] <- %s expires %d : %s',
                      cache_id, key_val, cache_expires, value)
    return result
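# The snippet above is the inner wrapper of a caching decorator. For context,
# a self-contained sketch of the same memoization idea in its simplest form,
# assuming a default uWSGI cache is configured; all names here are
# illustrative, not taken from the snippet.
import functools
import pickle

import uwsgi  # available only when running under uWSGI


def uwsgi_memoize(expires=300):
    """Minimal memoization decorator over the default uWSGI cache (a sketch)."""
    def decorator(func):
        @functools.wraps(func)
        def memoized(*args):
            key = '{}:{}'.format(func.__name__, args)
            if uwsgi.cache_exists(key):
                return pickle.loads(uwsgi.cache_get(key))
            result = func(*args)
            uwsgi.cache_set(key, pickle.dumps(result), expires)
            return result
        return memoized
    return decorator


@uwsgi_memoize(expires=60)
def slow_square(n):
    return n * n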
def get_picklist(listname):
    if uwsgi.cache_exists(listname):
        return pickle.loads(uwsgi.cache_get(listname))
    else:
        # three lists have special GETs but eventually they will be a picklist
        if listname == "tumorType":
            # picklist_values['tumorType']={ "cache_date": time.time(), "values": cache_oncotree()}
            uwsgi.cache_set(listname, pickle.dumps(cache_oncotree()), 900)
        elif listname == "Tag":
            # picklist_values['Tag']={ "cache_date": time.time(), "values": cache_barcodes()}
            uwsgi.cache_set(listname, pickle.dumps(cache_barcodes()), 900)
            if uwsgi.cache_get(listname) is None:
                return cache_barcodes()
        elif listname == "Reads+Coverage":
            uwsgi.cache_set("Reads+Coverage", pickle.dumps(cache_reads_coverage()), 900)
        else:
            r = s.get(
                LIMS_API_ROOT + "/getPickListValues?list=%s" % listname,
                auth=(LIMS_USER, LIMS_PW),
                verify=False,
            )
            log_lims(r)
            picklist = []
            for value in json.loads(r.content.decode('utf-8')):
                picklist.append({"id": value, "value": value})
            uwsgi.cache_set(listname, pickle.dumps(picklist), 900)
        return pickle.loads(uwsgi.cache_get(listname))
def add(self, key, value, timeout=None):
    '''
    Checks if the given key is in the cache.

    If it is, returns False and does not overwrite the value.
    If the key is not in the cache, this method acts like set().
    '''
    if uwsgi.cache_exists(key):
        return False
    return self.set(key, value, timeout=timeout)
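# Hypothetical calls against add() above; the cache instance is assumed.
cache.add('csrf:42', 'abc123')   # -> True, value stored
cache.add('csrf:42', 'zzz999')   # -> False, existing value left untouched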
def set(self, key, value, expires=0):
    key = str(key)
    value = pickle.dumps(value)
    if uwsgi.cache_exists(key, self.cachename):
        uwsgi.cache_update(key, value, expires, self.cachename)
    else:
        uwsgi.cache_set(key, value, expires, self.cachename)
    self._keys.add(key)
def set_value(key, value):
    if UWSGI:
        if uwsgi.cache_exists(key):
            uwsgi.cache_update(key, pickle.dumps(value))
        else:
            uwsgi.cache_set(key, pickle.dumps(value))
    else:
        _cache[key] = value
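# The read path is not shown; a minimal counterpart sketch under the same
# UWSGI flag and in-process _cache dict fallback assumed by set_value() above.
def get_value(key, default=None):
    if UWSGI:
        if uwsgi.cache_exists(key):
            return pickle.loads(uwsgi.cache_get(key))
        return default
    return _cache.get(key, default)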
def reset_debug_level(level):
    import uwsgi
    if level == '-1':
        uwsgi.cache_del("DEBUG")
    else:
        if uwsgi.cache_exists("DEBUG"):
            uwsgi.cache_update("DEBUG", level)
        else:
            uwsgi.cache_set("DEBUG", level)
    return redirect(
        url_for('tell_me_if_im_going_to_die',
                lat=39.9708657, lon=-75.1427425, meters=1000))
def check_status():
    cache_data = None
    user_id = request.args.get('user_id')
    target_user_id = request.args.get('target_user_id')
    key = f'{str(user_id)}_{str(target_user_id)}'
    if uwsgi.cache_exists(key):
        cache_data = uwsgi.cache_get(key).decode('utf-8')
    status = {'status': cache_data}
    return Response(json.dumps(status), mimetype='application/json')
def invoke(source_hash, method_name, packed_data):
    data = rpc_data_unpack(packed_data)
    logging.debug(str(data))
    args = data["args"]
    kwargs = data["kwargs"]
    namespace = data["namespace"]
    settings = data["settings"]
    logging.debug(f"args: {args}")
    logging.debug(f"kwargs: {kwargs}")
    logging.debug(f"namespace: {namespace}")
    if not cache_man.cache_exists(source_hash):
        return {"error": source_hash}, 404
    reg_dump = pickle.loads(cache_man.cache_get(source_hash))
    if method_name not in reg_dump:
        return {"error": f"{source_hash}/{method_name}"}, 404
    fn_globals = build_namespace(reg_dump[method_name]["imports"], namespace)
    fn = reg_dump[method_name]["code"].make_fn(fn_globals)
    result = None
    std_stream_subst = io.StringIO()
    if settings.get("return_stdout"):
        sys.stdout = std_stream_subst
    if settings.get("return_stderr"):
        sys.stderr = std_stream_subst
    try:
        result = fn(*args, **kwargs)
    except Exception as e:
        logging.warning(
            f"Method {method_name} failed with exception {e.__class__.__name__}:")
        result = RpcRemoteException(e)
    except SystemExit as se:
        result = se.code
    finally:
        if settings.get("return_stdout") or settings.get("return_stderr"):
            if settings.get("return_stdout"):
                sys.stdout = sys.__stdout__
            if settings.get("return_stderr"):
                sys.stderr = sys.__stderr__  # restore stderr, not stdout
            std_stream_subst.seek(0)
            fn_std_all = std_stream_subst.readlines()
        else:
            fn_std_all = []
    return rpc_data_pack({"return": result, "fn_output": fn_std_all}), 200
def process_request(json_in):
    uid = str(uuid.uuid4())
    json_in["id"] = uid
    uwsgi.queue_push(json.dumps(json_in))
    # The actual content of the message does not really matter;
    # it only triggers mule execution.
    uwsgi.mule_msg("s")
    # Busy-wait until the mule publishes the result under our id.
    while not uwsgi.cache_exists(uid, CACHE_NAME):
        continue
    res = uwsgi.cache_get(uid, CACHE_NAME)
    uwsgi.cache_del(uid, CACHE_NAME)
    return Response(response=res, status=200, mimetype="application/json")
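# The mule side of this request/response pattern is not shown. A minimal
# consumer sketch, assuming the same CACHE_NAME and an application-provided
# handle() callback; the uwsgi queue and mule calls are standard API.
def mule_loop(handle):
    while True:
        uwsgi.mule_get_msg()          # block until a worker pings the mule
        item = uwsgi.queue_pop()
        if not item:
            continue
        job = json.loads(item)
        result = handle(job)
        uwsgi.cache_set(job["id"], json.dumps(result), 0, CACHE_NAME)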
def himawari8(target):
    last_updatetime = bottle.request.query.get("updatetime")
    baseUrl = bottle.request.url[0:bottle.request.url.find("/hi8")]
    key = "himawari8.{}".format(target)
    result = None
    getcaps = None
    if uwsgi.cache_exists("himawari8"):
        if uwsgi.cache_exists(key):
            result = json.loads(uwsgi.cache_get(key))
        else:
            getcaps = uwsgi.cache_get("himawari8")
    else:
        res = requests.get("{}{}".format(baseUrl, FIREWATCH_GETCAPS),
                           verify=FIREWATCH_HTTPS_VERIFY)
        res.raise_for_status()
        getcaps = res.content
        getcaps = getcaps.decode("utf-8")
        uwsgi.cache_set("himawari8", getcaps, 60 * 10)  # cache for 10 mins
    if not result:
        layernames = re.findall(r"\w+HI8\w+{}\.\w+".format(target), getcaps)
        layers = []
        for layer in layernames:
            layers.append([
                settings.PERTH_TIMEZONE.localize(
                    datetime.datetime.strptime(
                        re.findall(r"\w+_(\d+)_\w+", layer)[0], "%Y%m%d%H%M")),
                layer
            ])
        layers = sorted(layers, key=lambda layer: layer[0])
        for layer in layers:
            layer[0] = layer[0].strftime("%a %b %d %Y %H:%M:%S AWST")
        result = {
            "servers": [baseUrl + FIREWATCH_SERVICE],
            "layers": layers,
            "updatetime": layers[-1][0]
        }
        uwsgi.cache_set(key, json.dumps(result), 60 * 10)  # cache for 10 mins
    if len(result["layers"]) == 0:
        return bottle.HTTPResponse(status=404)
    elif last_updatetime and last_updatetime == result["updatetime"]:
        bottle.response.status = 290
        return "{}"
    else:
        return result
def getAllTriggersAlarming():
    triggerCached = cache_get('triggerTelao', cache_options['triggerGet']['name'])
    if triggerCached:
        return json.loads(triggerCached)
    elif cache_get('updatingCache', cache_options['updates']['name']) == 'True':
        while cache_get('updatingCache', cache_options['updates']['name']) == 'True':
            time.sleep(0.3)
        else:
            return json.loads(
                cache_get('triggerTelao', cache_options['updates']['name']))
    else:
        if cache_exists('updatingCache', cache_options['updates']['name']):
            cache_update('updatingCache', 'True',
                         cache_options['updates']['expiration_time'],
                         cache_options['updates']['name'])
        else:
            cache_set('updatingCache', 'True',
                      cache_options['updates']['expiration_time'],
                      cache_options['updates']['name'])
        admin = Admin()
        zbx_admin_token = admin.auth()
        triggers = fowardZ.sendToZabbix(
            method='trigger.get',
            params={
                'selectHosts': ["name"],
                'selectGroups': ['groups'],
                'selectLastEvent': ['lastEvent', 'acknowledged'],
                'expandComment': 1,
                'expandDescription': 1,
                'only_true': 1,
                'output': 'extend'
            },
            auth=zbx_admin_token)
        cache_set('triggerTelao', json.dumps(triggers),
                  cache_options['triggerGet']['expiration_time'],
                  cache_options['triggerGet']['name'])
        cache_update('updatingCache', 'False',
                     cache_options['updates']['expiration_time'],
                     cache_options['updates']['name'])
        return triggers
def register(source_hash, method_name, packed_data):
    logging.info("{}: {}".format(request.method, request.url))
    data = rpc_data_unpack(request.get_data())
    logging.debug(str(data))
    fn_data = {method_name: data}
    if cache_man.cache_exists(source_hash):
        reg_dump = pickle.loads(cache_man.cache_get(source_hash))
        reg_dump.update(fn_data)
        cache_man.cache_update(source_hash, pickle.dumps(reg_dump),
                               settings.DEFAULT_CACHE_TTL)
    else:
        cache_man.cache_set(source_hash, pickle.dumps(fn_data),
                            settings.DEFAULT_CACHE_TTL)
def objects(cls):
    if not uwsgi.cache_exists("raw_aws"):
        if hasattr(cls, "_objects"):
            del cls._objects
        uwsgi.cache_set(
            "raw_aws",
            subprocess.check_output(
                ["aws", "ec2", "describe-instances", "--no-paginate"]),
            60 * 15)
    raw = json.loads(uwsgi.cache_get("raw_aws").decode("utf-8"))
    if hasattr(cls, "_objects"):
        return cls._objects
    objects = {}
    for data in raw["Reservations"]:
        for instance_data in data["Instances"]:
            instance = Instance(instance_data=instance_data)
            objects[instance.instance_id] = instance
    cls._objects = objects
    return objects  # A dict
def himawari8(target):
    if uwsgi.cache_exists("himawari8"):
        getcaps = uwsgi.cache_get("himawari8")
    else:
        getcaps = requests.get(FIREWATCH_GETCAPS).content
        uwsgi.cache_set("himawari8", getcaps, 60 * 10)  # cache for 10 mins
    getcaps = getcaps.decode("utf-8")
    layernames = re.findall(r"\w+HI8\w+{}\.\w+".format(target), getcaps)
    layers = []
    for layer in layernames:
        layers.append([
            FIREWATCH_TZ.localize(
                datetime.strptime(
                    re.findall(r"\w+_(\d+)_\w+", layer)[0],
                    "%Y%m%d%H%M")).isoformat(),
            layer
        ])
    result = {
        "servers": [FIREWATCH_SERVICE],
        "layers": layers
    }
    return result
def inc(self, key, delta=1):
    '''
    Increments the value of the given key by delta.
    If the key does not exist, it is set to delta.
    Returns the new value of the key, or None for any errors.

    Note: the read-modify-write below is not atomic across workers.
    '''
    try:
        delta = int(delta)
        if uwsgi.cache_exists(key):
            value = self.get(key) + delta
        else:
            value = delta
        self.set(key, value)
        return value
    except Exception as err:
        debug_error(err)
        return None
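# One way to serialize the read-modify-write in inc() across workers, as a
# sketch, is uWSGI's cross-process lock; the cache instance is assumed.
def atomic_inc(cache, key, delta=1):
    uwsgi.lock()      # take worker-wide lock 0
    try:
        return cache.inc(key, delta)
    finally:
        uwsgi.unlock()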
def callback():
    startup.getUserToken(request.args.get('code'))
    if not uwsgi.cache_exists('isRunning'):
        app.logger.info(
            "Creating new thread for refreshing spotify token and user stats.")
        uwsgi.cache_set('isRunning', 'True')
        uwsgi.cache_set('stop_threads', 'False')
        sp_t = spotify_thread(2500, "Thread-spotify")
        sp_t.start()
    try:
        if uwsgi.cache_get('isRunning').decode('utf-8') == 'True' \
                and uwsgi.cache_get('stop_threads').decode('utf-8') == 'True':
            app.logger.info("Restarting the Spotify application")
            uwsgi.cache_update('stop_threads', 'False')
    except AttributeError:
        app.logger.error(
            f"The isRunning or stop_threads variable is not initialized, "
            f"values: ir:{uwsgi.cache_get('isRunning')} and st:{uwsgi.cache_get('stop_threads')}"
        )
    list_time_range = ['short_term', 'medium_term', 'long_term']
    list_type = ['artists', 'tracks']
    dict_index = {
        'short_term_artists': 1,
        'medium_term_artists': 2,
        'long_term_artists': 3,
        'short_term_tracks': 4,
        'medium_term_tracks': 5,
        'long_term_tracks': 6
    }
    for type in list_type:
        for time_range in list_time_range:
            set_analytics_data(
                dict_index[f"{time_range}_{type}"],
                json.dumps(
                    json.loads(
                        get_users_top(
                            startup.getAccessToken()[1], type, time_range))),
                time_range, type)
    app.logger.info(
        f"All the threads are listed below: "
        f"{[thread.name for thread in threading.enumerate()]}")
    return redirect(url_for('project_spotify.spotify'))
def _cache_add_msg(self, worker_id, data):
    msg_key = None
    for msg_id in range(0, 10):
        msg_key = self.cache_msg_key % (worker_id, msg_id)
        if uwsgi.cache_exists(msg_key, self.cache) is None:
            break
        msg_key = None
    if msg_key is None:
        msg_key = self.cache_msg_key % (worker_id, 0)
        logger.warning(
            'Cached queue for worker %s is full, overwrite data' % worker_id)
    logger.debug('Store message from worker %s to %s'
                 % (self.worker_id, msg_key))
    return uwsgi.cache_update(
        msg_key, pickle.dumps(data),
        self.cache_timeout if worker_id else self.cache_fallback_timeout,
        self.cache)
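# A counterpart sketch for draining the per-worker message slots written by
# _cache_add_msg() above; the method name is an assumption.
def _cache_pop_msgs(self, worker_id):
    msgs = []
    for msg_id in range(0, 10):
        msg_key = self.cache_msg_key % (worker_id, msg_id)
        data = uwsgi.cache_get(msg_key, self.cache)
        if data is None:
            continue
        uwsgi.cache_del(msg_key, self.cache)
        msgs.append(pickle.loads(data))
    return msgs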
def cache_get(key: str, cache_id: str, strict: bool = True, default=None, logging_enable=True):
    """
    Get a value from the uwsgi cache.
    Used for app config section settings only <- json serializable
    """
    logging.debug('Looking for [%s] in cache [%s]', key, cache_id)
    if uwsgi.cache_exists(key, cache_id):
        value = uwsgi.cache_get(key, cache_id)
        value = json.loads(value.decode())
        if logging_enable:
            logging.debug('cache [%s] -> %s : %s', cache_id, key, value)
        return value
    if strict:
        raise HttpException(details=f'Cache [{cache_id}]: {key} is missing')
    if logging_enable:
        logging.debug('Cache [%s] missed for [%s], DEFAULT value [%s] is used',
                      cache_id, key, default)
    return default
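# Two hypothetical calls against the helper above, showing the strict and
# non-strict paths; the cache id 'app' is an assumption.
timeout = cache_get('timeout', 'app', strict=False, default=30)  # missing key -> 30
dsn = cache_get('dsn', 'app')  # missing key -> raises HttpException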
def __contains__(self, item):
    return uwsgi.cache_exists(item)
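# Defining __contains__ lets callers use Python's `in` operator; a tiny
# hypothetical example (the wrapper class name is assumed).
cache = UwsgiCacheWrapper()
if 'himawari8' in cache:   # dispatches to uwsgi.cache_exists('himawari8')
    print('capabilities document is cached')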
def get_layermetadata(layerids, kmiserver=settings.KMI_SERVER, results={}):
    multiple_layers = True
    if isinstance(layerids, basestring):
        layerids = [layerids]
        multiple_layers = False
    # group layers against layer workspace
    layers = {}
    for layerid in layerids:
        layerid = layerid.strip()
        # check whether it is cached or not
        key = layermetadatakey(layerid)
        if uwsgi.cache_exists(key):
            try:
                metadata = uwsgi.cache_get(key)
                if metadata:
                    if layerid in results:
                        results[layerid].update(json.loads(metadata))
                    else:
                        results[layerid] = json.loads(metadata)
                    #print("Retrieve the metadata from cache for layer ({})".format(layerid))
                    continue
            except:
                pass
        layer = layerid.split(":")
        if len(layer) == 1:
            # no workspace
            layer_ws = ""
            layer = layer[0]
        else:
            layer_ws = layer[0]
            layer = layer[1]
        if layer_ws not in layers:
            layers[layer_ws] = [layer]
        else:
            layers[layer_ws].append(layer)

    if layers:
        session_cookie = settings.get_session_cookie()
        kmiserver = get_kmiserver(kmiserver)
        # find the layer's metadata
        url = None
        for layer_ws, layers in layers.iteritems():
            if layer_ws:
                url = "{}/{}/wms?service=wms&version=1.1.1&request=GetCapabilities".format(kmiserver, layer_ws)
            else:
                url = "{}/wms?service=wms&version=1.1.1&request=GetCapabilities".format(kmiserver)
            res = requests.get(url, verify=False, cookies=session_cookie)
            res.raise_for_status()
            tree = ET.fromstring(res.content)
            capability = tree.find('Capability')
            if not len(capability):
                raise Exception("getCapability failed")
            kmi_layers = capability.findall("Layer")
            while kmi_layers:
                kmi_layer = kmi_layers.pop()
                name = get_child_value(kmi_layer, "Name")
                if name:
                    try:
                        index = layers.index(name)
                    except:
                        index = -1
                    if index >= 0:
                        # this layer's metadata is requested by the user
                        if layer_ws:
                            layerid = "{}:{}".format(layer_ws, name)
                        else:
                            layerid = name
                        if layerid in results:
                            result = results[layerid]
                        else:
                            result = {"id": layerid}
                            results[layerid] = result
                        del layers[index]
                        result["title"] = get_child_value(kmi_layer, "Title")
                        result["abstract"] = get_child_value(kmi_layer, "Abstract")
                        result["srs"] = get_child_value(kmi_layer, "SRS")
                        bbox = kmi_layer.find("LatLonBoundingBox")
                        if bbox is not None:
                            result["latlonBoundingBox"] = [
                                float(bbox.attrib["miny"]), float(bbox.attrib["minx"]),
                                float(bbox.attrib["maxy"]), float(bbox.attrib["maxx"])
                            ]
                        else:
                            result["latlonBoundingBox"] = None
                        for bbox in kmi_layer.findall("BoundingBox"):
                            result["latlonBoundingBox_{}".format(bbox.attrib["SRS"].upper())] = [
                                float(bbox.attrib["miny"]), float(bbox.attrib["minx"]),
                                float(bbox.attrib["maxy"]), float(bbox.attrib["maxx"])
                            ]
                        # cache it for 6 hours
                        key = layermetadatakey(result["id"])
                        try:
                            if uwsgi.cache_exists(key):
                                uwsgi.cache_update(key, json.dumps(result), 6 * 3600)
                            else:
                                uwsgi.cache_set(key, json.dumps(result), 6 * 3600)
                        except:
                            pass
                        #print("Retrieve the metadata from kmi for layer ({})".format(result["id"]))
                        if len(layers):
                            continue
                        else:
                            # already found metadata for all required layers
                            break
                sub_layers = kmi_layer.findall("Layer")
                if sub_layers:
                    kmi_layers += sub_layers
            if len(layers) == 1:
                if layer_ws:
                    raise Exception("The layer({}:{}) Not Found".format(layer_ws, layers[0]))
                else:
                    raise Exception("The layer({}) Not Found".format(layers[0]))
            elif len(layers) > 1:
                if layer_ws:
                    raise Exception("The layers({}) Not Found".format(
                        ",".join(["{}:{}".format(layer_ws, l) for l in layers])))
                else:
                    raise Exception("The layers({}) Not Found".format(",".join(layers)))
    if multiple_layers:
        return results
    else:
        return results[layerids[0]]
def get_layerdefinition(layerids, kmiserver=settings.KMI_SERVER, results={}):
    kmiserver = get_kmiserver(kmiserver)
    multiple_layers = True
    if isinstance(layerids, basestring):
        layerids = [layerids]
        multiple_layers = False
    # group layers against layer workspace
    layers = {}
    for layerid in layerids:
        layerid = layerid.strip()
        # check whether it is cached or not
        key = layerdefinitionkey(layerid)
        if uwsgi.cache_exists(key):
            try:
                definitiondata = uwsgi.cache_get(key)
                if definitiondata:
                    if layerid in results:
                        results[layerid].update(json.loads(definitiondata))
                    else:
                        results[layerid] = json.loads(definitiondata)
                    continue
            except:
                pass
        layer = layerid.split(":")
        if len(layer) == 1:
            # no workspace
            layer_ws = ""
            layer = layer[0]
        else:
            layer_ws = layer[0]
            layer = layer[1]
        if layer_ws not in layers:
            layers[layer_ws] = [layerid]
        else:
            layers[layer_ws].append(layerid)

    if layers:
        kmiserver = get_kmiserver(kmiserver)
        session_cookie = settings.get_session_cookie()
        url = None
        for layer_ws, layers in layers.iteritems():
            if layer_ws:
                url = "{}/{}/wfs?request=DescribeFeatureType&version=2.0.0&service=WFS&outputFormat=application%2Fjson&typeName={}".format(
                    kmiserver, layer_ws, ",".join(layers))
            else:
                url = "{}/wfs?request=DescribeFeatureType&version=2.0.0&service=WFS&outputFormat=application%2Fjson&typeName={}".format(
                    kmiserver, ",".join(layers))
            res = requests.get(url, verify=False, cookies=session_cookie)
            res.raise_for_status()
            layersdata = res.json()
            for layer in layersdata.get("featureTypes") or []:
                if layer_ws:
                    layerid = "{}:{}".format(layer_ws, layer["typeName"])
                else:
                    layerid = layer["typeName"]
                try:
                    index = layers.index(layerid)
                except:
                    index = -1
                if index >= 0:
                    # this layer's metadata is requested by the user
                    if layerid in results:
                        result = results[layerid]
                    else:
                        result = {"id": layerid}
                        results[layerid] = result
                    result["properties"] = layer["properties"]
                    result["geometry_property"] = None
                    result["geometry_properties"] = []
                    result["geometry_type"] = None
                    result["geometry_property_msg"] = None
                    del layers[index]
                    # find spatial columns
                    for prop in layer["properties"]:
                        if prop["type"].startswith("gml:"):
                            # spatial column
                            result["geometry_properties"].append(prop)
                    if len(result["geometry_properties"]) == 1:
                        result["geometry_property"] = result["geometry_properties"][0]
                        result["geometry_type"] = result["geometry_properties"][0]["localType"].lower()
                    elif len(result["geometry_properties"]) > 1:
                        # more than one geometry property; try to find the right one
                        if layer_ws:
                            url = "{}/{}/ows?service=WFS&version=2.0.0&request=GetFeature&typeName={}&count=1&outputFormat=application%2Fjson".format(
                                kmiserver, layer_ws, layerid)
                        else:
                            url = "{}/ows?service=WFS&version=2.0.0&request=GetFeature&typeName={}&count=1&outputFormat=application%2Fjson".format(
                                kmiserver, layerid)
                        res = requests.get(url, verify=False, cookies=session_cookie)
                        res.raise_for_status()
                        featuresdata = res.json()
                        if len(featuresdata["features"]) > 0:
                            feat = featuresdata["features"][0]
                            for prop in result["geometry_properties"]:
                                if prop["name"] == feat["geometry_name"]:
                                    result["geometry_property"] = prop
                                    result["geometry_type"] = prop["localType"].lower()
                                    break
                        if not result["geometry_property"]:
                            result["geometry_property_msg"] = (
                                "Layer '{}' has more than one geometry column, "
                                "can't identify which column is used as the geometry column.".format(layerid))
                    else:
                        result["geometry_property_msg"] = "Layer '{}' is not a spatial layer".format(layerid)
                    if result["geometry_property"]:
                        # found the geometry property, remove it from properties
                        index = len(result["properties"]) - 1
                        while index >= 0:
                            if result["properties"][index] == result["geometry_property"]:
                                # this is the geometry property, remove it from properties
                                del result["properties"][index]
                                break
                            index -= 1
                    # cache it for 1 day
                    key = layerdefinitionkey(layerid)
                    try:
                        if uwsgi.cache_exists(key):
                            uwsgi.cache_update(key, json.dumps(result), 24 * 3600)
                        else:
                            uwsgi.cache_set(key, json.dumps(result), 24 * 3600)
                    except:
                        pass
            if len(layers) == 1:
                if layer_ws:
                    raise Exception("The layer({}:{}) Not Found".format(layer_ws, layers[0]))
                else:
                    raise Exception("The layer({}) Not Found".format(layers[0]))
            elif len(layers) > 1:
                if layer_ws:
                    raise Exception("The layers({}) Not Found".format(
                        ",".join(["{}:{}".format(layer_ws, l) for l in layers])))
                else:
                    raise Exception("The layers({}) Not Found".format(",".join(layers)))
    if multiple_layers:
        return results
    else:
        return results[layerids[0]]
def application(env, start_response):
    uwsgi.websocket_handshake(env["HTTP_SEC_WEBSOCKET_KEY"], env.get("HTTP_ORIGIN", ""))
    if (not uwsgi.cache_exists("chats")):
        uwsgi.cache_update("chats", "")
    if (not uwsgi.cache_exists("names")):
        uwsgi.cache_update("names", "")
    if (not uwsgi.cache_exists("roomNumbers")):
        uwsgi.cache_update("roomNumbers", "")
    # Static data for testing:
    if (uwsgi.cache_get("roomNumbers") == ""):
        uwsgi.cache_update("roomNumbers",
                           uwsgi.cache_get("roomNumbers") +
                           "".join([str(number) for number in [0, 10, 11, 12]]))
    if (not uwsgi.cache_exists("0")):
        uwsgi.cache_update("0", "1Reimu11Marisa22Rumia33Daiyousei44")
    if (not uwsgi.cache_exists("10")):
        uwsgi.cache_update("10", "2Cirno11Meiling22Koakuma33Patchouli44")
    if (not uwsgi.cache_exists("11")):
        uwsgi.cache_update("11", "3Sakuya11Remilia22Flandre33Letty44")
    if (not uwsgi.cache_exists("12")):
        uwsgi.cache_update("12", "0Chen11Alice22Lily33")
    playersMax = 4
    nameChat = ""
    roomsMax = 100
    roomNumberChat = -1
    while (True):
        msg = uwsgi.websocket_recv()
        msg_type = ""
        msg_data = ""
        if (msg and (msg != "")):
            msg_type = msg.split("")[0]
            msg_data = msg.split("")[1]
        print "Message: " + repr(msg) + "; " + "Type: " + repr(msg_type) + "; " + "Data: " + repr(msg_data)
        if (msg_type == "chat"):
            chats = uwsgi.cache_get("chats")
            chats += "" + msg_data + ""
            uwsgi.cache_update("chats", chats)
        if (msg_type == "close"):
            roomNumber = msg_data.split("")[0]
            name = msg_data.split("")[1]
            if (name):
                names = uwsgi.cache_get("names").split("")
                names.remove(name)
                uwsgi.cache_update("names", "".join(names))
                chats = uwsgi.cache_get("chats").split("")
                i = 0
                while (i < len(chats)):
                    chat = chats[i].split("")
                    if (name in chats[3:]):
                        del chat[chat.index(name, 3)]
                    chats[i] = "".join(chat)
            if (int(roomNumber) > -1):
                room = uwsgi.cache_get(roomNumber).split("")
                i = 1
                while (i < len(room)):
                    if (name == room[i].split("")[0]):
                        room[i] = ""
                        room = "".join(room)
                        uwsgi.cache_update(roomNumber, room)
                        if (room[room.index(""):] == playersMax * ""):
                            roomNumbers = uwsgi.cache_get("roomNumbers").split("")
                            roomNumbers.remove(roomNumber)
                            uwsgi.cache_update("roomNumbers", "".join(roomNumbers))
                            uwsgi.cache_del(roomNumber)
                        break
                    i += 1
            print name + " disconnected."
            return [""]
        if (msg_type == "leave"):
            roomNumber = msg_data.split("")[0]
            name = msg_data.split("")[1]
            roomNumberChat = -1
            room = uwsgi.cache_get(roomNumber).split("")
            i = 1
            while (i < len(room)):
                if (name == room[i].split("")[0]):
                    room[i] = ""
                    room = "".join(room)
                    uwsgi.cache_update(roomNumber, room)
                    if (room[room.index(""):] == playersMax * ""):
                        roomNumbers = uwsgi.cache_get("roomNumbers").split("")
                        roomNumbers.remove(roomNumber)
                        uwsgi.cache_update("roomNumbers", "".join(roomNumbers))
                        uwsgi.cache_del(roomNumber)
                    break
                i += 1
        if (msg_type == "join"):
            roomNumber = msg_data.split("")[0]
            name = msg_data.split("")[1]
            room = uwsgi.cache_get(roomNumber).split("")
            if (room[0] != "0"):
                uwsgi.websocket_send("false")
            else:
                i = 1
                while (i < len(room)):
                    if ((room[i] == "") and (room[i] != name + "")):
                        room[i] = name + room[i]
                        room = "".join(room)
                        uwsgi.cache_update(roomNumber, room)
                        uwsgi.websocket_send(room)
                        roomNumberChat = int(roomNumber)
                        break
                    i += 1
                else:
                    uwsgi.websocket_send("false")
        if (msg_type == "name"):
            if (msg_data in uwsgi.cache_get("names").split("")):
                uwsgi.websocket_send("false")
            else:
                names = uwsgi.cache_get("names").split("")
                names.append(msg_data)
                uwsgi.cache_update("names", "".join(names))
                print msg_data + " connected."
                nameChat = msg_data
                uwsgi.websocket_send("true")
        if (msg_type == "roomCreate"):
            roomNumbers = uwsgi.cache_get("roomNumbers").split("")
            if (len(roomNumbers) == 100):
                # The cache is full
                uwsgi.websocket_send("false")
            roomNumbers = [int(number) for number in roomNumbers if number]
            # Not the most efficient but an easy way to find the lowest available room number:
            roomNumber = 0
            while (roomNumber in roomNumbers):
                roomNumber += 1
            roomNumbers.append(roomNumber)
            roomNumbers = sorted(roomNumbers)
            uwsgi.cache_update("roomNumbers", "".join([str(number) for number in roomNumbers]))
            roomNumberChat = roomNumber
            roomNumber = str(roomNumber)
            uwsgi.cache_update(roomNumber, "0" + "" + msg_data + "" + (playersMax - 1) * "")
            uwsgi.websocket_send(roomNumber)
        if (msg_type == "rooms"):
            rooms = []
            for number in uwsgi.cache_get("roomNumbers").split(""):
                if (number):
                    rooms.append(number + "" + uwsgi.cache_get(number))
            uwsgi.websocket_send("".join(rooms))
        if (msg_type == "wait"):
            uwsgi.websocket_send(uwsgi.cache_get(msg_data.split("")[0]))
            room = uwsgi.cache_get(msg_data.split("")[0]).split("")
            room = [player.split("") for player in room]
            for player in room[1:]:
                if (not player[0]):
                    break
            else:
                uwsgi.websocket_send("ready")
        chats = uwsgi.cache_get("chats")
        chats = chats.split("")
        i = 0
        while (i < len(chats)):
            chat = chats[i].split("")
            if (chat == [""]):
                i += 1
                continue
            if (nameChat not in chat[3:]):
                chat.append(nameChat)
                chats[i] = "".join(chat)
                if (roomNumberChat == int(chat[0])):
                    uwsgi.websocket_send("chat" + chat[1] + "" + chat[2])
            names = uwsgi.cache_get("names").split("")
            namesChat = chat[3:]
            for name in names:
                if (name not in namesChat):
                    break
            else:
                del chats[i]
            i += 1
        uwsgi.cache_update("chats", "".join(chats))
def get_layerdefinition(layerids, kmiserver="https://kmi.dbca.wa.gov.au/geoserver", results={}):
    kmiserver = get_kmiserver(kmiserver)
    multiple_layers = True
    if isinstance(layerids, basestring):
        layerids = [layerids]
        multiple_layers = False
    # group layers against layer workspace
    layers = {}
    for layerid in layerids:
        layerid = layerid.strip()
        # check whether it is cached or not
        key = layerdefinitionkey(layerid)
        if uwsgi.cache_exists(key):
            try:
                definitiondata = uwsgi.cache_get(key)
                if definitiondata:
                    if layerid in results:
                        results[layerid].update(json.loads(definitiondata))
                    else:
                        results[layerid] = json.loads(definitiondata)
                    continue
            except:
                pass
        layer = layerid.split(":")
        if len(layer) == 1:
            # no workspace
            layer_ws = ""
            layer = layer[0]
        else:
            layer_ws = layer[0]
            layer = layer[1]
        if layer_ws not in layers:
            layers[layer_ws] = [layerid]
        else:
            layers[layer_ws].append(layerid)

    if layers:
        kmiserver = get_kmiserver(kmiserver)
        session_cookie = settings.get_session_cookie()
        url = None
        for layer_ws, layers in layers.iteritems():
            if layer_ws:
                url = "{}/{}/wfs?request=DescribeFeatureType&version=2.0.0&service=WFS&outputFormat=application%2Fjson&typeName={}".format(
                    kmiserver, layer_ws, ",".join(layers))
            else:
                url = "{}/wfs?request=DescribeFeatureType&version=2.0.0&service=WFS&outputFormat=application%2Fjson&typeName={}".format(
                    kmiserver, ",".join(layers))
            res = requests.get(url, verify=False, cookies=session_cookie)
            res.raise_for_status()
            layersdata = res.json()
            for layer in layersdata.get("featureTypes") or []:
                if layer_ws:
                    layerid = "{}:{}".format(layer_ws, layer["typeName"])
                else:
                    layerid = layer["typeName"]
                try:
                    index = layers.index(layerid)
                except:
                    index = -1
                if index >= 0:
                    # this layer's metadata is requested by the user
                    if layerid in results:
                        result = results[layerid]
                    else:
                        result = {"id": layerid}
                        results[layerid] = result
                    result["properties"] = layer["properties"]
                    result["geometry_property"] = None
                    result["geometry_properties"] = []
                    result["geometry_type"] = None
                    result["geometry_property_msg"] = None
                    del layers[index]
                    # find spatial columns
                    for prop in layer["properties"]:
                        if prop["type"].startswith("gml:"):
                            # spatial column
                            result["geometry_properties"].append(prop)
                    if len(result["geometry_properties"]) == 1:
                        result["geometry_property"] = result["geometry_properties"][0]
                        result["geometry_type"] = result["geometry_properties"][0]["localType"].lower()
                    elif len(result["geometry_properties"]) > 1:
                        # more than one geometry property; try to find the right one
                        if layer_ws:
                            url = "{}/{}/ows?service=WFS&version=2.0.0&request=GetFeature&typeName={}&count=1&outputFormat=application%2Fjson".format(
                                kmiserver, layer_ws, layerid)
                        else:
                            url = "{}/ows?service=WFS&version=2.0.0&request=GetFeature&typeName={}&count=1&outputFormat=application%2Fjson".format(
                                kmiserver, layerid)
                        res = requests.get(url, verify=False, cookies=session_cookie)
                        res.raise_for_status()
                        featuresdata = res.json()
                        if len(featuresdata["features"]) > 0:
                            feat = featuresdata["features"][0]
                            for prop in result["geometry_properties"]:
                                if prop["name"] == feat["geometry_name"]:
                                    result["geometry_property"] = prop
                                    result["geometry_type"] = prop["localType"].lower()
                                    break
                        if not result["geometry_property"]:
                            result["geometry_property_msg"] = (
                                "Layer '{}' has more than one geometry column, "
                                "can't identify which column is used as the geometry column.".format(layerid))
                    else:
                        result["geometry_property_msg"] = "Layer '{}' is not a spatial layer".format(layerid)
                    if result["geometry_property"]:
                        # found the geometry property, remove it from properties
                        index = len(result["properties"]) - 1
                        while index >= 0:
                            if result["properties"][index] == result["geometry_property"]:
                                # this is the geometry property, remove it from properties
                                del result["properties"][index]
                                break
                            index -= 1
                    # cache it for 1 day
                    key = layerdefinitionkey(layerid)
                    try:
                        if uwsgi.cache_exists(key):
                            uwsgi.cache_update(key, json.dumps(result), 24 * 3600)
                        else:
                            uwsgi.cache_set(key, json.dumps(result), 24 * 3600)
                    except:
                        pass
            if len(layers) == 1:
                if layer_ws:
                    raise Exception("The layer({}:{}) Not Found".format(layer_ws, layers[0]))
                else:
                    raise Exception("The layer({}) Not Found".format(layers[0]))
            elif len(layers) > 1:
                if layer_ws:
                    raise Exception("The layers({}) Not Found".format(
                        ",".join(["{}:{}".format(layer_ws, l) for l in layers])))
                else:
                    raise Exception("The layers({}) Not Found".format(",".join(layers)))
    if multiple_layers:
        return results
    else:
        return results[layerids[0]]
def update_uwsgi_cache(key, value):
    if uwsgi.cache_exists(key):
        uwsgi.cache_update(key, value)
    else:
        uwsgi.cache_set(key, value)
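# In recent uWSGI releases cache_update() also creates a missing key (worth
# verifying against your uWSGI version), so the exists-probe above can be
# collapsed; a hypothetical simplification:
def update_uwsgi_cache_v2(key, value):
    uwsgi.cache_update(key, value)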
def get(self, item, default=None):
    item = str(item)
    if uwsgi.cache_exists(item, self.cachename):
        return pickle.loads(uwsgi.cache_get(item, self.cachename))
    else:
        return default
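# A hypothetical round trip through a dict-like wrapper combining set(),
# get(), and __contains__() methods like those above; the class name and
# constructor are assumptions.
c = UwsgiCacheDict()
c.set('user:1', {'name': 'ada'}, expires=300)
if 'user:1' in c:                     # via __contains__
    print(c.get('user:1'))            # -> {'name': 'ada'}
print(c.get('missing', default={}))   # -> {}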
def get_layermetadata(layerids, kmiserver="https://kmi.dbca.wa.gov.au/geoserver", results={}):
    multiple_layers = True
    if isinstance(layerids, basestring):
        layerids = [layerids]
        multiple_layers = False
    # group layers against layer workspace
    layers = {}
    for layerid in layerids:
        layerid = layerid.strip()
        # check whether it is cached or not
        key = layermetadatakey(layerid)
        if uwsgi.cache_exists(key):
            try:
                metadata = uwsgi.cache_get(key)
                if metadata:
                    if layerid in results:
                        results[layerid].update(json.loads(metadata))
                    else:
                        results[layerid] = json.loads(metadata)
                    #print("Retrieve the metadata from cache for layer ({})".format(layerid))
                    continue
            except:
                pass
        layer = layerid.split(":")
        if len(layer) == 1:
            # no workspace
            layer_ws = ""
            layer = layer[0]
        else:
            layer_ws = layer[0]
            layer = layer[1]
        if layer_ws not in layers:
            layers[layer_ws] = [layer]
        else:
            layers[layer_ws].append(layer)

    if layers:
        session_cookie = settings.get_session_cookie()
        kmiserver = get_kmiserver(kmiserver)
        # find the layer's metadata
        url = None
        for layer_ws, layers in layers.iteritems():
            if layer_ws:
                url = "{}/{}/wms?service=wms&version=1.1.1&request=GetCapabilities".format(kmiserver, layer_ws)
            else:
                url = "{}/wms?service=wms&version=1.1.1&request=GetCapabilities".format(kmiserver)
            res = requests.get(url, verify=False, cookies=session_cookie)
            res.raise_for_status()
            tree = ET.fromstring(res.content)
            capability = tree.find('Capability')
            if not len(capability):
                raise Exception("getCapability failed")
            kmi_layers = capability.findall("Layer")
            while kmi_layers:
                kmi_layer = kmi_layers.pop()
                name = get_child_value(kmi_layer, "Name")
                if name:
                    try:
                        index = layers.index(name)
                    except:
                        index = -1
                    if index >= 0:
                        # this layer's metadata is requested by the user
                        if layer_ws:
                            layerid = "{}:{}".format(layer_ws, name)
                        else:
                            layerid = name
                        if layerid in results:
                            result = results[layerid]
                        else:
                            result = {"id": layerid}
                            results[layerid] = result
                        del layers[index]
                        result["title"] = get_child_value(kmi_layer, "Title")
                        result["abstract"] = get_child_value(kmi_layer, "Abstract")
                        result["srs"] = get_child_value(kmi_layer, "SRS")
                        bbox = kmi_layer.find("LatLonBoundingBox")
                        if bbox is not None:
                            result["latlonBoundingBox"] = [
                                float(bbox.attrib["miny"]), float(bbox.attrib["minx"]),
                                float(bbox.attrib["maxy"]), float(bbox.attrib["maxx"])
                            ]
                        else:
                            result["latlonBoundingBox"] = None
                        for bbox in kmi_layer.findall("BoundingBox"):
                            result["latlonBoundingBox_{}".format(bbox.attrib["SRS"].upper())] = [
                                float(bbox.attrib["miny"]), float(bbox.attrib["minx"]),
                                float(bbox.attrib["maxy"]), float(bbox.attrib["maxx"])
                            ]
                        # cache it for 6 hours
                        key = layermetadatakey(result["id"])
                        try:
                            if uwsgi.cache_exists(key):
                                uwsgi.cache_update(key, json.dumps(result), 6 * 3600)
                            else:
                                uwsgi.cache_set(key, json.dumps(result), 6 * 3600)
                        except:
                            pass
                        #print("Retrieve the metadata from kmi for layer ({})".format(result["id"]))
                        if len(layers):
                            continue
                        else:
                            # already found metadata for all required layers
                            break
                sub_layers = kmi_layer.findall("Layer")
                if sub_layers:
                    kmi_layers += sub_layers
            if len(layers) == 1:
                if layer_ws:
                    raise Exception("The layer({}:{}) Not Found".format(layer_ws, layers[0]))
                else:
                    raise Exception("The layer({}) Not Found".format(layers[0]))
            elif len(layers) > 1:
                if layer_ws:
                    raise Exception("The layers({}) Not Found".format(
                        ",".join(["{}:{}".format(layer_ws, l) for l in layers])))
                else:
                    raise Exception("The layers({}) Not Found".format(",".join(layers)))
    if multiple_layers:
        return results
    else:
        return results[layerids[0]]
def get_layermetadata(layers, results={}):
    multiple_layers = True
    if isinstance(layers, dict):
        layers = [layers]
        multiple_layers = False
    # group layers against their WFS service
    unresolved_layers = {}
    for layer in layers:
        # check whether it is cached or not
        key = layermetadatakey(layer.id)
        if uwsgi.cache_exists(key):
            try:
                metadata = uwsgi.cache_get(key)
                if metadata:
                    if layer.id in results:
                        results[layer.id].update(json.loads(metadata))
                    else:
                        results[layer.id] = json.loads(metadata)
                    #print("Retrieve the metadata from cache for layer ({})".format(layerid))
                    continue
            except:
                pass
        if layer.wfsservice in unresolved_layers:
            unresolved_layers[layer.wfsservice].append(layer.id)
        else:
            unresolved_layers[layer.wfsservice] = [layer.id]

    if unresolved_layers:
        session_cookie = settings.get_session_cookie()
        # find the layer's metadata
        url = None
        for wfsservice, layerids in unresolved_layers.items():
            url = "{}?service=wfs&version=2.0.0&request=GetCapabilities".format(wfsservice)
            res = requests.get(url, verify=False, cookies=session_cookie)
            res.raise_for_status()
            namespaces = dict((node[0], node[1]) for _, node in
                              ET.iterparse(io.BytesIO(res.content), events=['start-ns']))
            tree = ET.fromstring(res.content)
            capability = tree.find('wfs:FeatureTypeList', namespaces=namespaces)
            if not len(capability):
                raise Exception("getCapability failed")
            kmi_layers = capability.findall("wfs:FeatureType", namespaces=namespaces)
            for kmi_layer in kmi_layers:
                name = get_child_value(kmi_layer, "wfs:Name", namespaces)
                if name:
                    try:
                        index = layerids.index(name)
                    except:
                        index = -1
                    if index >= 0:
                        # this layer's metadata is requested by the user
                        if name in results:
                            result = results[name]
                        else:
                            result = {"id": name}
                            results[name] = result
                        del layerids[index]
                        result["title"] = get_child_value(kmi_layer, "wfs:Title", namespaces)
                        result["abstract"] = get_child_value(kmi_layer, "wfs:Abstract", namespaces)
                        result["srs"] = get_child_value(kmi_layer, "wfs:DefaultCRS", namespaces)
                        bbox = kmi_layer.find("ows:WGS84BoundingBox", namespaces=namespaces)
                        if bbox is not None:
                            lowercorner = get_child_value(bbox, "ows:LowerCorner", namespaces).split()
                            uppercorner = get_child_value(bbox, "ows:UpperCorner", namespaces).split()
                            result["latlonBoundingBox"] = [
                                float(lowercorner[1]), float(lowercorner[0]),
                                float(uppercorner[1]), float(uppercorner[0])]
                        else:
                            result["latlonBoundingBox"] = None
                        # cache it for 6 hours
                        key = layermetadatakey(result["id"])
                        try:
                            if uwsgi.cache_exists(key):
                                uwsgi.cache_update(key, json.dumps(result), 6 * 3600)
                            else:
                                uwsgi.cache_set(key, json.dumps(result), 6 * 3600)
                        except:
                            pass
                        #print("Retrieve the metadata from kmi for layer ({})".format(result["id"]))
                        if len(layerids):
                            continue
                        else:
                            # already found metadata for all required layers
                            break
            if len(layerids) == 1:
                raise Exception("The layer({}) Not Found in WFS Service({})".format(layerids[0], wfsservice))
            elif len(layerids) > 1:
                raise Exception("The layers({}) Not Found in WFS Service({})".format(",".join(layerids), wfsservice))
    if multiple_layers:
        return results
    else:
        return results[layers[0].id]
def init(env):
    """ Initialize/cache application settings, set logging """
    logging_debugs = []  # buffer for debug logging, since the logger has not yet been initialized
    global CACHE_CFG_ID  # pylint: disable=global-statement
    CACHE_CFG_ID = os.environ.get('IP2W_CACHE_CFG_ID', CACHE_CFG_ID)
    logging_debugs.append(f'{os.getuid()}:{os.getgid()}')
    logging_debugs.append(f'OS Environments:\n{os.environ}\nUWSGI Environments:\n{env}')
    logging_debugs.append(f'Start init app using cache [{CACHE_CFG_ID=}]')
    try:
        # If the {app} section is present in the cache, then everything else (app.*) is there too.
        if not uwsgi.cache_exists('app', CACHE_CFG_ID):
            cfg_fp = os.environ.get(
                'IP2W_INI_FILE',
                str(Path(__file__).parent / "config" / INI_FILE_NAME))
            with open(cfg_fp) as cfg_f:
                cfg_text = cfg_f.read()
            cfg_text = os.path.expandvars(cfg_text)  # resolve $ENV | ${ENV} refs to os.environ
            cfg_text = expand_uwsgi_ref2env(cfg_text, env)  # resolve $(ENV) refs to uwsgi env
            cfg_text = expand_uwsgi_ref2env(cfg_text, os.environ)  # resolve $(ENV) refs to os.environ
            config = ConfigParser(interpolation=ExtendedInterpolation(),
                                  strict=False, allow_no_value=True)
            # use original case of key names
            config.optionxform = lambda option: option
            # # add section [wsgi.env] to resolve uwsgi refs ${wsgi.env:key}
            # config.read_dict({'wsgi.env': dict(filter(lambda item: type(item[1]) in [int, str], env.items()))})
            # resolve ${section:key} refs if [section] key exists,
            # else config[section].items() will throw an exception
            config.read_string(cfg_text)
            for section in config:
                if section.startswith('app'):  # just filter uwsgi sections
                    # actual interpolation takes place here through an internal call to config.get(section, ...)
                    cfg_section = dict(config[section].items())
                    logging_debugs.append(
                        f'cache [{CACHE_CFG_ID}] <- {section}: {jpp(cfg_section)} ')
                    # https://readthedocs.org/projects/uwsgi-docs/downloads/pdf/latest/ :
                    # The expires argument (default 0 for disabled) is the number of seconds after
                    # which the object is no longer valid (and will be removed by the cache sweeper
                    # when purge_lru is not set...)
                    uwsgi.cache_set(section, json.dumps(cfg_section).encode(), 0, CACHE_CFG_ID)
                    logging_debugs.append(
                        f'uwsgi.cache_exists({section}, {CACHE_CFG_ID}) = '
                        f'{uwsgi.cache_exists(section, CACHE_CFG_ID)}')
        else:
            logging_debugs.append(f'Cache [{CACHE_CFG_ID}] -> init app cfg')
        cfg_app = config_app(logging_enable=False)
        logging_debugs.append(f'Config [app]: {jpp(cfg_app)}')
        for app_key in cfg_app:
            if app_key.startswith('app_') and app_key.upper() in globals():
                globals()[app_key.upper()] = cfg_app[app_key]
                logging_debugs.append(
                    f'Init global var [{app_key.upper()}]: {cfg_app[app_key]}')
        # init logging (basicConfig only)
        cfg_logging = config_app('logging', logging_enable=False)
        logging_basic_configs = dict(
            (key.replace('base_config_', '').lower(), cfg_logging[key])
            for key in cfg_logging.keys() if key.startswith('base_config_'))
        logging.basicConfig(**logging_basic_configs, force=True)
        logging_debugs.append(f'Init logging.basicConfig: {logging_basic_configs}')
    finally:
        # note: if logging is not initialized then output goes to the uwsgi log
        logging.debug('\n'.join(logging_debugs))
def data_handler():
    path = []
    cache_data = None
    message = None
    fails_messages = {
        'notfound': 'Sorry, no mutual friends found',
        'fail': 'Sorry, no mutual friends found',
        'inprogress': 'Search in progress, please wait'
    }
    user = request.form['user']
    user_id = request.form['user_id']
    if not request.form['search']:
        error = 'You need to specify who to search for'
        return render_template('search_user.html', error=error, user=user, user_id=user_id)
    username = request.cookies.get('username')
    service = VkApiForWeb(login=username)
    vk_session = service.session
    if service.error:
        error = service.error
        return render_template('search_user.html', error=error, user=user, user_id=user_id)
    api = vk_session.get_api()
    try:
        target_user = api.users.get(user_ids=str(request.form['search']))
    except:
        error = 'The user does not exist. Try another query.'
        return render_template('search_user.html', error=error, user=user, user_id=user_id)
    target_user_id = target_user[0].get('id')
    key = f'{str(user_id)}_{str(target_user_id)}'
    if uwsgi.cache_exists(key):
        cache_data = uwsgi.cache_get(key).decode('utf-8')
        if cache_data == 'found':
            path = find_path(user_id, target_user_id)
        elif cache_data in list(fails_messages.keys()):
            message = fails_messages.get(cache_data)
    else:
        path = find_path(user_id, target_user_id)
    if path:
        message = 'Hooray! Mutual friends found'
        cache_data = 'found'
        uwsgi.cache_update(key, 'found')
    elif cache_data not in list(fails_messages.keys()):
        message = fails_messages.get('inprogress')
        cache_data = 'inprogress'
        uwsgi.mule_msg(key)
    return redirect(
        url_for('result', user_id=user_id, message=message,
                target_user_id=target_user_id, state=cache_data))
def _get_profile(app):
    # get app profile
    profile = None
    appPath = os.path.join(settings.DIST_PATH, "{}.js".format(app))
    if not os.path.exists(appPath):
        raise Exception("Application({}) Not Found".format(app))
    key = "{}_profile".format(app)
    profileChanged = False
    if uwsgi.cache_exists(key):
        profile = uwsgi.cache_get(key)
    if profile:
        profile = json.loads(profile)
        if repr(os.path.getmtime(appPath)) != profile["mtime"] \
                or os.path.getsize(appPath) != profile["size"]:
            profileChanged = True
            profile = None
    if not profile:
        file_data = None
        with open(appPath, "rb") as f:
            file_data = f.read()
        m = profile_re.search(file_data.decode("utf-8"))
        profile = {
            'mtime': repr(os.path.getmtime(appPath)),
            'size': os.path.getsize(appPath),
            'profile': demjson.decode(m.group("profile") if m else "{}")
        }
        m = hashlib.md5()
        m.update(file_data)
        profile['profile']['build']['md5'] = base64.urlsafe_b64encode(m.digest()).decode().rstrip("=")
        file_data = None
        if profileChanged:
            uwsgi.cache_update(key, json.dumps(profile))
        else:
            uwsgi.cache_set(key, json.dumps(profile))
    profile["profile"]["dependents"] = {}

    # get vendor md5
    vendorPath = os.path.join(settings.DIST_PATH, "vendor.js")
    if not os.path.exists(vendorPath):
        raise Exception("Vendor library Not Found")
    key = "{}_profile".format("vendor")
    profileChanged = False
    vendorProfile = None
    if uwsgi.cache_exists(key):
        vendorProfile = uwsgi.cache_get(key)
    if vendorProfile:
        vendorProfile = json.loads(vendorProfile)
        if repr(os.path.getmtime(vendorPath)) != vendorProfile["mtime"] \
                or os.path.getsize(vendorPath) != vendorProfile["size"]:
            profileChanged = True
            vendorProfile = None
    if not vendorProfile:
        m = hashlib.md5()
        with open(vendorPath, "rb") as f:
            m.update(f.read())
        vendorProfile = {
            'mtime': repr(os.path.getmtime(vendorPath)),
            'size': os.path.getsize(vendorPath),
            'vendorMD5': base64.urlsafe_b64encode(m.digest()).decode().rstrip("=")
        }
        if profileChanged:
            uwsgi.cache_update(key, json.dumps(vendorProfile))
        else:
            uwsgi.cache_set(key, json.dumps(vendorProfile))
    profile["profile"]["dependents"]["vendorMD5"] = vendorProfile["vendorMD5"]

    # get env profile
    envPath = os.path.join(settings.BASE_DIST_PATH, 'release', 'static', 'js',
                           "{}.env.js".format(settings.ENV_TYPE))
    if not os.path.exists(envPath):
        raise Exception("'{}.env.js' is missing.".format(settings.ENV_TYPE))
    else:
        key = "{}_{}_profile".format("env", settings.ENV_TYPE)
        profileChanged = False
        envProfile = None
        if uwsgi.cache_exists(key):
            envProfile = uwsgi.cache_get(key)
        if envProfile:
            envProfile = json.loads(envProfile)
            if repr(os.path.getmtime(envPath)) != envProfile["mtime"] \
                    or os.path.getsize(envPath) != envProfile["size"]:
                profileChanged = True
                envProfile = None
        if not envProfile:
            m = hashlib.md5()
            with open(envPath, "rb") as f:
                m.update(f.read())
            envProfile = {
                'mtime': repr(os.path.getmtime(envPath)),
                'size': os.path.getsize(envPath),
                'envMD5': base64.urlsafe_b64encode(m.digest()).decode().rstrip("=")
            }
            if profileChanged:
                uwsgi.cache_update(key, json.dumps(envProfile))
            else:
                uwsgi.cache_set(key, json.dumps(envProfile))
        profile["profile"]["dependents"]["envMD5"] = envProfile["envMD5"]
        profile["profile"]["envType"] = settings.ENV_TYPE

    # get style profile
    stylePath = os.path.join(settings.BASE_DIST_PATH, 'release', 'static', 'css', "style.css")
    if not os.path.exists(stylePath):
        raise Exception("'style.css' is missing.")
    else:
        key = "style_profile"
        profileChanged = False
        styleProfile = None
        if uwsgi.cache_exists(key):
            styleProfile = uwsgi.cache_get(key)
        if styleProfile:
            styleProfile = json.loads(styleProfile)
            if repr(os.path.getmtime(stylePath)) != styleProfile["mtime"] \
                    or os.path.getsize(stylePath) != styleProfile["size"]:
                profileChanged = True
                styleProfile = None
        if not styleProfile:
            m = hashlib.md5()
            with open(stylePath, "rb") as f:
                m.update(f.read())
            styleProfile = {
                'mtime': repr(os.path.getmtime(stylePath)),
                'size': os.path.getsize(stylePath),
                'styleMD5': base64.urlsafe_b64encode(m.digest()).decode().rstrip("=")
            }
            if profileChanged:
                uwsgi.cache_update(key, json.dumps(styleProfile))
            else:
                uwsgi.cache_set(key, json.dumps(styleProfile))
        profile["profile"]["dependents"]["styleMD5"] = styleProfile["styleMD5"]
    return profile["profile"]
def set_cache(key, val):
    if not uwsgi.cache_exists(key):
        uwsgi.cache_set(key, pickle.dumps(val))
    else:
        uwsgi.cache_update(key, pickle.dumps(val))
def _get_profile(app):
    # get app profile
    profile = None
    appPath = os.path.join(DIST_PATH, "{}.js".format(app))
    if not os.path.exists(appPath):
        appPath = os.path.join(DIST_PATH, "sss.js")
    key = "{}_profile".format(app)
    profileChanged = False
    if uwsgi.cache_exists(key):
        profile = uwsgi.cache_get(key)
    if profile:
        profile = json.loads(profile)
        if repr(os.path.getmtime(appPath)) != profile["mtime"] \
                or os.path.getsize(appPath) != profile["size"]:
            profileChanged = True
            profile = None
    if not profile:
        file_data = None
        with open(appPath, "rb") as f:
            file_data = f.read()
        m = profile_re.search(file_data)
        profile = m.group("profile") if m else "{}"
        profile = {
            'mtime': repr(os.path.getmtime(appPath)),
            'size': os.path.getsize(appPath),
            'profile': demjson.decode(profile)
        }
        m = hashlib.md5()
        m.update(file_data)
        profile['profile']['build']['md5'] = base64.urlsafe_b64encode(m.digest()).rstrip("=")
        file_data = None
        if profileChanged:
            uwsgi.cache_update(key, json.dumps(profile))
        else:
            uwsgi.cache_set(key, json.dumps(profile))
    profile["profile"]["dependents"] = {}

    # get vendor md5
    vendorPath = os.path.join(DIST_PATH, "vendor.js")
    if not os.path.exists(vendorPath):
        raise Exception("Vendor library not found")
    key = "{}_profile".format("vendor")
    profileChanged = False
    vendorProfile = None
    if uwsgi.cache_exists(key):
        vendorProfile = uwsgi.cache_get(key)
    if vendorProfile:
        vendorProfile = json.loads(vendorProfile)
        if repr(os.path.getmtime(vendorPath)) != vendorProfile["mtime"] \
                or os.path.getsize(vendorPath) != vendorProfile["size"]:
            profileChanged = True
            vendorProfile = None
    if not vendorProfile:
        m = hashlib.md5()
        with open(vendorPath, "rb") as f:
            m.update(f.read())
        vendorProfile = {
            'mtime': repr(os.path.getmtime(vendorPath)),
            'size': os.path.getsize(vendorPath),
            'vendorMD5': base64.urlsafe_b64encode(m.digest()).rstrip("=")
        }
        if profileChanged:
            uwsgi.cache_update(key, json.dumps(vendorProfile))
        else:
            uwsgi.cache_set(key, json.dumps(vendorProfile))
    profile["profile"]["dependents"]["vendorMD5"] = vendorProfile["vendorMD5"]

    # get env profile
    envPath = os.path.join(BASE_DIST_PATH, 'release', 'static', 'js',
                           "{}-{}.env.js".format(app, ENV_TYPE))
    if not os.path.exists(envPath):
        raise Exception("'{}-{}.env.js' is missing.".format(app, ENV_TYPE))
    else:
        key = "{}_{}_profile".format("env", ENV_TYPE)
        profileChanged = False
        envProfile = None
        if uwsgi.cache_exists(key):
            envProfile = uwsgi.cache_get(key)
        if envProfile:
            envProfile = json.loads(envProfile)
            if repr(os.path.getmtime(envPath)) != envProfile["mtime"] \
                    or os.path.getsize(envPath) != envProfile["size"]:
                profileChanged = True
                envProfile = None
        if not envProfile:
            m = hashlib.md5()
            with open(envPath, "rb") as f:
                m.update(f.read())
            envProfile = {
                'mtime': repr(os.path.getmtime(envPath)),
                'size': os.path.getsize(envPath),
                'envMD5': base64.urlsafe_b64encode(m.digest()).rstrip("=")
            }
            if profileChanged:
                uwsgi.cache_update(key, json.dumps(envProfile))
            else:
                uwsgi.cache_set(key, json.dumps(envProfile))
        profile["profile"]["dependents"]["envMD5"] = envProfile["envMD5"]
        profile["profile"]["envType"] = ENV_TYPE

    # get style profile
    stylePath = os.path.join(BASE_DIST_PATH, 'release', 'static', 'css', "style.css")
    if not os.path.exists(stylePath):
        raise Exception("'style.css' is missing.")
    else:
        key = "style_profile"
        profileChanged = False
        styleProfile = None
        if uwsgi.cache_exists(key):
            styleProfile = uwsgi.cache_get(key)
        if styleProfile:
            styleProfile = json.loads(styleProfile)
            if repr(os.path.getmtime(stylePath)) != styleProfile["mtime"] \
                    or os.path.getsize(stylePath) != styleProfile["size"]:
                profileChanged = True
                styleProfile = None
        if not styleProfile:
            m = hashlib.md5()
            with open(stylePath, "rb") as f:
                m.update(f.read())
            styleProfile = {
                'mtime': repr(os.path.getmtime(stylePath)),
                'size': os.path.getsize(stylePath),
                'styleMD5': base64.urlsafe_b64encode(m.digest()).rstrip("=")
            }
            if profileChanged:
                uwsgi.cache_update(key, json.dumps(styleProfile))
            else:
                uwsgi.cache_set(key, json.dumps(styleProfile))
        profile["profile"]["dependents"]["styleMD5"] = styleProfile["styleMD5"]
    return profile["profile"]