Code example #1
File: server_jsonrpc.py Project: yakkobr/gbix
def methods():

    gbix_methods_cached = cache_get('/api/methods',
                                    cache_options['default']['name'])

    if not gbix_methods_cached:
        try:
            arq = os.path.join(
                os.path.split(os.path.split(os.path.realpath(__file__))[0])[0],
                'doc/api_data.json')
            f = open(arq)
        except IOError:
            return 'Methods file not found.'
        methods_json = str()
        pattern = re.compile(r'\s+"filename')
        for line in f.readlines():
            if re.match(pattern, line) is None:
                methods_json += line.strip().replace('<p>', '').replace(
                    '</p>', '').replace(' "', '"').replace('\\"', '')
        f.close()
        cache_set('/api/methods', methods_json, 0,
                  cache_options['default']['name'])
        return methods_json
    else:
        return gbix_methods_cached
Code example #2
File: isitup.py Project: SpazioDati/isitup
def index():
    try:
        url = request.params['url']
    except KeyError:
        abort(400, 'Please provide a url')
        return  # only for pylint

    url_hash = hash(url)
    cache_key = str(url_hash)

    lock_idx = url_hash % LOCKS
    with lock(lock_idx):
        cache = uwsgi.cache_get(cache_key)
        if cache == 'd':
            return invalid()
        if cache == 'u':
            return url

        try:
            req = urllib2.Request(
                url,
                headers={'User-Agent': USERAGENT}
            )
            urllib2.urlopen(req, timeout=10)
        except:
            uwsgi.cache_set(cache_key, 'd', EXPIRE)
            return invalid()
        else:
            uwsgi.cache_set(cache_key, 'u', EXPIRE)
            return url
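The lock() context manager used above is not part of the snippet; a plausible sketch of what it might look like, built on uwsgi.lock()/uwsgi.unlock(), which accept an optional lock index (the number of available locks is configured with the uWSGI "locks" option and is assumed here to match LOCKS):

from contextlib import contextmanager

import uwsgi


@contextmanager
def lock(idx):
    # acquire the numbered uWSGI lock and always release it, even if the body raises
    uwsgi.lock(idx)
    try:
        yield
    finally:
        uwsgi.unlock(idx)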
Code example #3
 def wrapper(*args, **kwargs):
     wrapper.cfg = config_method(method)  # __wrapped__
     cache_id = wrapper.cfg.get(METHOD_CACHE_ID, None)
     cache_on = bool(cache_id)  # not not cache_id
     cache_expires = int(wrapper.cfg.get(METHOD_CACHE_EXPIRES, "0"))
     args_spec = inspect.getfullargspec(func).args
     if cache_key_name in kwargs:
         key_val = kwargs[
             cache_key_name] if cache_key_name in args_spec else kwargs.pop(
                 cache_key_name)
     else:
         key_val = args[args_spec.index(
             cache_key_name)]  # if not exists KeyError will be raised
     if isinstance(key_val, dict):
         key_val = hash(
             tuple(sorted(key_val.items(), key=lambda item: item[0])))
     key_val = f'{method}:{str(key_val)}'
     if cache_on and uwsgi.cache_exists(key_val, cache_id):
         value = uwsgi.cache_get(key_val, cache_id)
         value = pickle.loads(value)
         logging.debug('cache [%s] -> %s : %s', cache_id, key_val,
                       value)
         return value
     result = func(*args, **kwargs)
     if cache_on:
         value = pickle.dumps(result)
         uwsgi.cache_set(key_val, value, cache_expires, cache_id)
         logging.debug('cache [%s] <- %s expires %d : %s', cache_id,
                       key_val, cache_expires, value)
     return result
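The dict-handling branch of the wrapper can be illustrated in isolation. A self-contained sketch of how a dict argument is reduced to a stable cache key (make_cache_key and the 'user.get' method name are illustrative, not part of the original code):

def make_cache_key(method, key_val):
    # dict arguments are reduced to a hash of their sorted items so that the
    # same mapping always produces the same key within a process
    if isinstance(key_val, dict):
        key_val = hash(tuple(sorted(key_val.items(), key=lambda item: item[0])))
    return f'{method}:{key_val}'


print(make_cache_key('user.get', {'id': 42, 'lang': 'en'}))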
Code example #4
File: webtest1.py Project: th0ma5w/polybius
def memrefresh(feedurl):
	value = jsonld_from_feedparser(loaded_feed(unquote(feedurl),repo,True).feed)
	if cache_exists(feedurl):
		cache_update(feedurl,value,3600*24)
	else:
		cache_set(feedurl,value,3600*24)
	return value
Code example #5
File: webtest1.py Project: th0ma5w/polybius
def memfeed(feedurl):
	if cache_exists(feedurl):
		return cache_get(feedurl)
	else:
		value = jsonld_from_feedparser(loaded_feed(unquote(feedurl),repo).feed) 
		cache_set(feedurl,value,3600*24)
		return value
Code example #6
File: uwsgi.py Project: jeis2497052/python-client
    def log_event(self, event):
        """Adds an impression to the log if it is enabled, otherwise the impression is dropped.
        :param impression: The impression tuple
        :type impression: Impression
        """
        cache_event = dict(event._asdict())

        try:
            self.__lock_events()
            if self._adapter.cache_exists(self._EVENTS_KEY,
                                          _SPLITIO_STATS_CACHE_NAMESPACE):
                events = decode(
                    self._adapter.cache_get(self._EVENTS_KEY,
                                            _SPLITIO_STATS_CACHE_NAMESPACE))
            else:
                events = []

            if len(events) < self._events_queue_size:
                events.append(cache_event)
                _logger.debug('Adding event to cache: {}.'.format(event))
                self._adapter.cache_update(self._EVENTS_KEY, encode(events), 0,
                                           _SPLITIO_STATS_CACHE_NAMESPACE)
                return True

            # Set a key to force an events flush
            uwsgi.cache_set(self._EVENTS_FLUSH, '1', 0,
                            _SPLITIO_STATS_CACHE_NAMESPACE)
            return False
        finally:
            self.__unlock_events()
Code example #7
File: app.py Project: iqer/uwsgi_cache_perf_demo
def demo_set():
    # demo = TestDemo()
    # print(str(demo))
    # print(sys.getsizeof(demo))
    demo = 'a' * 2000
    print('++++++++++++++++++++++++++')
    print('Size:', sys.getsizeof(demo))
    print('++++++++++++++++++++++++++')

    try:
        # for k, v in uwsgi.__dict__.iteritems():
        #     print('{}: {}'.format(k, v))
        # from remote_pdb import RemotePdb

        # RemotePdb('127.0.0.1', 4444).set_trace()
        # demo = uwsgi.cache_exists('demo')
        global num
        if num > 10:
            num = 0
        key = 'cache_{}'.format(num)
        demo = pickle.dumps(demo)
        uwsgi.cache_set(key, demo, 0, 'mycache')
        print(len(uwsgi.cache_get(key, 'mycache')))
        num += 1
        return str(num)

    except:
        import traceback
        traceback.print_exc()
        return 'not set'
Code example #8
File: webtest1.py Project: th0ma5w/polybius
def memopml(opmlurl):
	if cache_exists(opmlurl):
		return cache_get(opmlurl)
	else:
		value = jsonld_from_opml(loaded_opml(unquote(opmlurl),repo).opml)
		cache_set(opmlurl,value,3600*24)
		return value
Code example #9
File: cache.py Project: sigsergv/pyrone
def set_value(key, value):
    if UWSGI:
        if uwsgi.cache_exists(key):
            uwsgi.cache_update(key, pickle.dumps(value))
        else:
            uwsgi.cache_set(key, pickle.dumps(value))
    else:
        _cache[key] = value
Code example #10
 def test_non_bitmap(self):
     self.assertTrue(uwsgi.cache_set('KEY', 'X' * 20, 0,
                                     'items_non_bitmap'))
     self.assertTrue(uwsgi.cache_del('KEY', 'items_non_bitmap'))
     self.assertIsNone(
         uwsgi.cache_set('KEY', 'X' * 21, 0, 'items_non_bitmap'))
     self.assertTrue(uwsgi.cache_set('KEY', 'X' * 20, 0,
                                     'items_non_bitmap'))
Code example #11
 def set(self, key, value, expires=0):
     key = str(key)
     value = pickle.dumps(value)
     if uwsgi.cache_exists(key, self.cachename):
         uwsgi.cache_update(key, value, expires, self.cachename)
     else:
         uwsgi.cache_set(key, value, expires, self.cachename)
     self._keys.add(key)
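The matching get() is not shown in this snippet; a minimal sketch, assuming values were pickled by set() as above and that self.cachename names the same uWSGI cache:

 def get(self, key, default=None):
     key = str(key)
     # uwsgi.cache_get returns None when the key is missing or expired
     value = uwsgi.cache_get(key, self.cachename)
     if value is None:
         return default
     return pickle.loads(value)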
Code example #12
 def test_big_random(self):
     blob = self.rand_blob(100000)
     self.assertTrue(uwsgi.cache_set('KEY', blob, 0, 'items_1_100000'))
     get_blob = uwsgi.cache_get('KEY', 'items_1_100000')
     self.assertEqual(blob, get_blob)
     self.assertTrue(uwsgi.cache_del('KEY', 'items_1_100000'))
     self.assertIsNone(uwsgi.cache_set('KEY', 'X' * 100001, 0, 'items_1_100000'))
     self.assertTrue(uwsgi.cache_set('KEY', 'X' * 10000, 0, 'items_1_100000'))
Code example #13
 def test_lru(self):
     self.assertTrue(uwsgi.cache_set('KEY1', 'X' * 20, 0, 'items_lru'))
     self.assertTrue(uwsgi.cache_set('KEY2', 'X' * 20, 0, 'items_lru'))
     self.assertTrue(uwsgi.cache_set('KEY3', 'Y' * 20, 0, 'items_lru'))
     self.assertIsNone(uwsgi.cache_get('KEY1', 'items_lru'))
     uwsgi.cache_get('KEY3', 'items_lru')
     for i in range(4, 100):
         self.assertTrue(uwsgi.cache_set('KEY%d' % i, 'Y' * 20, 0, 'items_lru'))
         self.assertIsNone(uwsgi.cache_get('KEY%d' % (i-2), 'items_lru'))
Code example #14
File: views.py Project: 50onRed/wandersafe
def reset_debug_level(level):
    import uwsgi
    if level == '-1':
        uwsgi.cache_del("DEBUG")
    else:
        if uwsgi.cache_exists("DEBUG"):
            uwsgi.cache_update("DEBUG", level)
        else:
            uwsgi.cache_set("DEBUG", level)
    return redirect(url_for('tell_me_if_im_going_to_die', lat=39.9708657, lon=-75.1427425, meters=1000))
Code example #15
def increment_request_count(user_id):
    worker_id = str(uwsgi.worker_id())

    if uwsgi.cache_get(worker_id):
        c = int(uwsgi.cache_get(worker_id))
        c += 1
        uwsgi.cache_update(worker_id, str(c))
    else:
        uwsgi.cache_set(worker_id, '0')

    return f"user_id:{user_id}:workder_id:{worker_id}:request_number:{uwsgi.cache_get(worker_id).decode()}"
Code example #16
File: service2.py Project: kasworld/tiny_uwsgi
    def __init__(self):
        ServiceBase.__init__(self)
        ProfileMixin.__init__(self)
        DispatcherMixin_CRR.__init__(self)

        if not uwsgi.cache_exists('Service2Counter'):
            uwsgi.cache_set('Service2Counter', '0')
        if not uwsgi.cache_exists('Service2Timer'):
            uwsgi.cache_set('Service2Timer', '0')
        print uwsgi.queue_size
        gevent.spawn(microtask, uwsgi.worker_id())
        print 'after gevent.spawn'
Code example #17
    def test_multi_delete(self):
        for i in range(0, 100):
            self.assertTrue(uwsgi.cache_set('key1', 'X' * 50 , 0, 'items_4_10'))
            self.assertTrue(uwsgi.cache_del('key1', 'items_4_10'))

        for i in range(0, 100):
            self.assertIsNone(uwsgi.cache_set('key1', 'X' * 51 , 0, 'items_4_10'))
            self.assertIsNone(uwsgi.cache_del('key1', 'items_4_10'))

        for i in range(0, 100):
            self.assertTrue(uwsgi.cache_set('key1', 'X' * 50 , 0, 'items_4_10'))
            self.assertTrue(uwsgi.cache_del('key1', 'items_4_10'))
Code example #18
def getAllTriggersAlarming():
    triggerCached = cache_get('triggerTelao',
                              cache_options['triggerGet']['name'])
    if triggerCached:
        return json.loads(triggerCached)
    elif cache_get('updatingCache',
                   cache_options['updates']['name']) == 'True':
        while cache_get('updatingCache',
                        cache_options['updates']['name']) == 'True':
            time.sleep(0.3)
        else:
            return json.loads(
                cache_get('triggerTelao', cache_options['updates']['name']))
    else:
        if cache_exists('updatingCache', cache_options['updates']['name']):
            cache_update('updatingCache', 'True',
                         cache_options['updates']['expiration_time'],
                         cache_options['updates']['name'])
        else:
            cache_set('updatingCache', 'True',
                      cache_options['updates']['expiration_time'],
                      cache_options['updates']['name'])

        admin = Admin()
        zbx_admin_token = admin.auth()

        triggers = fowardZ.sendToZabbix(method='trigger.get',
                                        params={
                                            'selectHosts': ["name"],
                                            'selectGroups': ['groups'],
                                            'selectLastEvent':
                                            ['lastEvent', 'acknowledged'],
                                            'expandComment':
                                            1,
                                            'expandDescription':
                                            1,
                                            'only_true':
                                            1,
                                            'output':
                                            'extend'
                                        },
                                        auth=zbx_admin_token)

        cache_set('triggerTelao', json.dumps(triggers),
                  cache_options['triggerGet']['expiration_time'],
                  cache_options['triggerGet']['name'])
        cache_update('updatingCache', 'False',
                     cache_options['updates']['expiration_time'],
                     cache_options['updates']['name'])

    return triggers
Code example #19
def himawari8(target):
    last_updatetime = bottle.request.query.get("updatetime")
    getLayers = bottle.request.query.get("getLayers")
    https_verify = (bottle.request.query.get("https_verify")
                    or "true").lower() == "true"
    baseUrl = bottle.request.url[0:bottle.request.url.find("/hi8")]
    if not getLayers.startswith("http"):
        getLayers = "{}{}".format(baseUrl, getLayers)

    key = "himawari8.{}".format(target)
    result = None
    getcaps = None
    if uwsgi.cache_exists("himawari8"):
        if uwsgi.cache_exists(key):
            result = json.loads(uwsgi.cache_get(key))
        else:
            getcaps = uwsgi.cache_get("himawari8").decode("utf-8")
    else:
        res = requests.get(getLayers, verify=https_verify)
        res.raise_for_status()
        getcaps = res.content
        uwsgi.cache_set("himawari8", getcaps, 60 * 10)  # cache for 10 mins
        getcaps = getcaps.decode("utf-8")

    if not result:
        layernames = re.findall(r"\w+HI8\w+{}\.\w+".format(target), getcaps)
        layers = []
        for layer in layernames:
            layers.append([
                settings.PERTH_TIMEZONE.localize(
                    datetime.datetime.strptime(
                        re.findall(r"\w+_(\d+)_\w+", layer)[0], "%Y%m%d%H%M")),
                layer
            ])
        layers = sorted(layers, key=lambda layer: layer[0])
        for layer in layers:
            layer[0] = (layer[0]).strftime("%a %b %d %Y %H:%M:%S AWST")
        result = {
            "servers": [baseUrl + FIREWATCH_SERVICE],
            "layers": layers,
            "updatetime": layers[len(layers) - 1][0]
        }
        uwsgi.cache_set(key, json.dumps(result), 60 * 10)  # cache for 10 mins

    if len(result["layers"]) == 0:
        return bottle.HTTPResponse(status=404)
    elif last_updatetime and last_updatetime == result["updatetime"]:
        bottle.response.status = 290
        return "{}"
    else:
        return result
Code example #20
def get_picklist(listname):
    if uwsgi.cache_exists(listname):
        return pickle.loads(uwsgi.cache_get(listname))
    else:
        # three lists have special GETs but eventually they will be a picklist
        if listname == "tumorType":
            #            picklist_values['tumorType']={ "cache_date": time.time(), "values":cache_oncotree()}
            uwsgi.cache_set(listname, pickle.dumps(cache_oncotree()), 900)
        elif listname == "Tag":
            #            picklist_values['Tag']={ "cache_date": time.time(), "values": cache_barcodes()}
            uwsgi.cache_set(listname, pickle.dumps(cache_barcodes()), 900)
            if uwsgi.cache_get(listname) == None:
                return cache_barcodes()
        elif listname == "Reads+Coverage":
            uwsgi.cache_set("Reads+Coverage",
                            pickle.dumps(cache_reads_coverage()), 900)
        else:
            r = s.get(
                LIMS_API_ROOT + "/getPickListValues?list=%s" % listname,
                auth=(LIMS_USER, LIMS_PW),
                verify=False,
            )
            log_lims(r)
            picklist = []
            for value in json.loads(r.content.decode('utf-8')):
                picklist.append({"id": value, "value": value})
            uwsgi.cache_set(listname, pickle.dumps(picklist), 900)
        return pickle.loads(uwsgi.cache_get(listname))
Code example #21
File: cache.py Project: longears/rigor-webapp
		def set(self, key, value, timeout=None):
			'''
			Adds a new key/value pair overwriting any existing value for the key. If timeout is given,
			sets the timeout on the key to be the given timeout, otherwise it does not set one
			Returns True if the key/value pair was set successfully, False otherwise
			'''
			try:
				if timeout is None:
					return uwsgi.cache_set(key, value)
				else:
					return uwsgi.cache_set(key, value, timeout)
			except Exception as e:
				debug_error(e)
				return False
Code example #22
File: rpc_backend.py Project: olekas/easy-pyrpc
def register(source_hash, method_name, packed_data):
    logging.info("{}: {}".format(request.method, request.url))
    data = rpc_data_unpack(request.get_data())
    logging.debug(str(data))

    fn_data = {method_name: data}

    if cache_man.cache_exists(source_hash):
        reg_dump = pickle.loads(cache_man.cache_get(source_hash))
        reg_dump.update(fn_data)
        cache_man.cache_update(source_hash, pickle.dumps(reg_dump),
                               settings.DEFAULT_CACHE_TTL)
    else:
        cache_man.cache_set(source_hash, pickle.dumps(fn_data),
                            settings.DEFAULT_CACHE_TTL)
Code example #23
 def objects(cls):
     if not uwsgi.cache_exists("raw_aws"):
         if hasattr(cls, "_objects"):
             del cls._objects
         uwsgi.cache_set("raw_aws", subprocess.check_output(["aws", "ec2", "describe-instances", "--no-paginate"]), 60*15)
     raw = json.loads(uwsgi.cache_get("raw_aws").decode("utf-8"))
     if hasattr(cls, "_objects"):
         return cls._objects
     objects = {}
     for data in raw["Reservations"]:
         for instance_data in data["Instances"]:
             instance = Instance(instance_data=instance_data)
             objects[instance.instance_id] = instance
     cls._objects = objects
     return objects  # A dict
Code example #24
File: wrapper.py Project: fossabot/beecell
 def cache_set(self, key, value, expire=None, cache_server=None):
     '''
     Set a value in the cache.
     key : The cache key to write.
     value : The cache value to write.
     expire : Expiry time of the value, in seconds.
     cache_server : The UNIX/TCP socket where the cache portal2 is listening. Optional.
     '''
     # check the combined case first so that both the expiry and the cache
     # name are forwarded in the right argument positions
     if expire is not None and cache_server is not None:
         return uwsgi.cache_set(key, value, expire, cache_server)
     elif expire is not None:
         return uwsgi.cache_set(key, value, expire)
     elif cache_server is not None:
         # an expire of 0 means "no expiration", keeping cache_server in the fourth slot
         return uwsgi.cache_set(key, value, 0, cache_server)
     else:
         return uwsgi.cache_set(key, value)
Code example #25
 def test_big_update(self):
     self.assertTrue(uwsgi.cache_set('key1', 'X' * 40, 0, 'items_4_10'))
     self.assertTrue(uwsgi.cache_update('key1', 'X' * 10, 0, 'items_4_10'))
     self.assertTrue(uwsgi.cache_del('key1', 'items_4_10'))
     self.assertIsNone(uwsgi.cache_update('key1', 'X' * 51, 0,
                                          'items_4_10'))
     self.assertTrue(uwsgi.cache_update('key1', 'X' * 50, 0, 'items_4_10'))
Code example #26
def loop():
    while True:
        key = uwsgi.mule_get_msg()
        key = key.decode('utf-8')
        ids = key.split('_')
        uwsgi.cache_set(key, 'inprogress')
        try:
            result = bfs(ids[0], ids[1])
        except:
            uwsgi.cache_update(key, 'fail')
        else:
            if result:
                uwsgi.cache_update(key, 'found')
                print(key)
            else:
                uwsgi.cache_update(key, 'notfound')
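The producer side of this mule worker is not shown; a plausible sketch of how a request handler could enqueue a pair of ids and report progress, using uwsgi.mule_msg() to wake the mule and the same 'id1_id2' key format (start_search is a hypothetical helper):

def start_search(id1, id2):
    # loop() creates the cache entry itself, so only send the message if the
    # key is not there yet; otherwise just report the current status
    key = '{}_{}'.format(id1, id2)
    if not uwsgi.cache_exists(key):
        uwsgi.mule_msg(key)
    status = uwsgi.cache_get(key)
    return status.decode('utf-8') if status else 'queued'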
Code example #27
File: __init__.py Project: scottp-dpaw/gokart
def himawari8(target):
    if uwsgi.cache_exists("himawari8"):
        getcaps = uwsgi.cache_get("himawari8")
    else:
        getcaps = requests.get(FIREWATCH_GETCAPS).content
        uwsgi.cache_set("himawari8", getcaps, 60*10)  # cache for 10 mins
    getcaps = getcaps.decode("utf-8")
    layernames = re.findall(r"\w+HI8\w+{}\.\w+".format(target), getcaps)
    layers = []
    for layer in layernames:
        layers.append([FIREWATCH_TZ.localize(datetime.strptime(re.findall(r"\w+_(\d+)_\w+", layer)[0], "%Y%m%d%H%M")).isoformat(), layer])
    result = {
        "servers": [FIREWATCH_SERVICE],
        "layers": layers
    }
    return result
Code example #28
def bom(target):
    last_updatetime = bottle.request.query.get("updatetime")
    layeridpattern = bottle.request.query.get("layeridpattern")
    if layeridpattern:
        layeridpattern = "bom:{}".format(layeridpattern)
    else:
        layeridpattern = "bom:{}{{:0>3}}".format(target)
    current_timeline = None
    try:
        current_timeline = json.loads(uwsgi.cache_get(target))
    except:
        current_timeline = None

    bottle.response.set_header("Content-Type", "application/json")
    bottle.response.status = 200
    if current_timeline and datetime.datetime.now(
    ) - datetime.datetime.strptime(
            current_timeline["refreshtime"],
            "%a %b %d %Y %H:%M:%S") < datetime.timedelta(minutes=5):
        # data was refreshed within the last 5 minutes, use the cached result directly
        if current_timeline["updatetime"] == last_updatetime:
            # returning 304 causes a "No element found" error on the client, so return a custom status code with the same meaning as 304
            bottle.response.status = 290
            return "{}"
        else:
            return {
                "layers": current_timeline["layers"],
                "updatetime": current_timeline["updatetime"]
            }

    timeline = getTimelineFromWmsLayer(current_timeline,
                                       bomLayerIdFunc(layeridpattern))

    if not timeline:
        raise "Missing some of http parameters 'basetimelayer', 'timelinesize', 'layertimespan'."

    if not current_timeline or id(timeline) != id(current_timeline):
        uwsgi.cache_set(target, json.dumps(timeline), 0)

    if timeline["updatetime"] == last_updatetime:
        bottle.response.status = 290
        return "{}"
    else:
        return {
            "layers": timeline["layers"],
            "updatetime": timeline["updatetime"]
        }
Code example #29
def callback():
    startup.getUserToken(request.args.get('code'))
    if not uwsgi.cache_exists('isRunning'):
        app.logger.info(
            "Creating new thread for refreshing spotify token and user stats.")
        uwsgi.cache_set('isRunning', 'True')
        uwsgi.cache_set('stop_threads', 'False')
        sp_t = spotify_thread(2500, "Thread-spotify")
        sp_t.start()
    try:
        if uwsgi.cache_get('isRunning').decode(
                'utf-8') == 'True' and uwsgi.cache_get('stop_threads').decode(
                    'utf-8') == 'True':
            app.logger.info("Relancement de l'application spotify")
            uwsgi.cache_update('stop_threads', 'False')
    except AttributeError:
        app.logger.error(
            f"La variable isRunning ou stop_threads n'est pas initialisée, valeurs : ir:{uwsgi.cache_get('isRunning')} et st:{uwsgi.cache_get('stop_threads')}"
        )
    list_time_range = ['short_term', 'medium_term', 'long_term']
    list_type = ['artists', 'tracks']
    dict_index = {
        'short_term_artists': 1,
        'medium_term_artists': 2,
        'long_term_artists': 3,
        'short_term_tracks': 4,
        'medium_term_tracks': 5,
        'long_term_tracks': 6
    }

    for type in list_type:
        for time_range in list_time_range:
            set_analytics_data(
                dict_index[f"{time_range}_{type}"],
                json.dumps(
                    json.loads(
                        get_users_top(
                            startup.getAccessToken()[1],
                            type,
                            time_range,
                        ))), time_range, type)

    app.logger.info(
        f"All the threads are listed below : {[thread.name for thread in threading.enumerate()]}"
    )

    return redirect(url_for('project_spotify.spotify'))
Code example #30
 def objects(cls):
     if not uwsgi.cache_exists("raw_aws"):
         if hasattr(cls, "_objects"):
             del cls._objects
         uwsgi.cache_set(
             "raw_aws",
             subprocess.check_output(
                 ["aws", "ec2", "describe-instances", "--no-paginate"]),
             60 * 15)
     raw = json.loads(uwsgi.cache_get("raw_aws").decode("utf-8"))
     if hasattr(cls, "_objects"):
         return cls._objects
     objects = {}
     for data in raw["Reservations"]:
         for instance_data in data["Instances"]:
             instance = Instance(instance_data=instance_data)
             objects[instance.instance_id] = instance
     cls._objects = objects
     return objects
Code example #31
File: server_jsonrpc.py Project: globocom/gbix
def methods():

    gbix_methods_cached = cache_get('/api/methods', cache_options['default']['name'])

    if not gbix_methods_cached:
        try:
            arq = os.path.join(os.path.split(os.path.split(os.path.realpath(__file__))[0])[0], 'doc/api_data.json')
            f = open(arq)
        except IOError:
            return 'Methods file not found.'
        methods_json = str()
        pattern = re.compile(r'\s+"filename')
        for line in f.readlines():
            if re.match(pattern, line) is None:
                methods_json += line.strip().replace('<p>', '').replace('</p>', '').replace(' "', '"').replace('\\"', '')
        f.close()
        cache_set('/api/methods', methods_json, 0, cache_options['default']['name'])
        return methods_json
    else:
        return gbix_methods_cached
Code example #32
File: __init__.py Project: parksandwildlife/gokart
def himawari8(target):
    last_updatetime = bottle.request.query.get("updatetime")
    baseUrl = bottle.request.url[0:bottle.request.url.find("/hi8")]
    key = "himawari8.{}".format(target)
    result = None
    getcaps = None
    if uwsgi.cache_exists("himawari8"):
        if uwsgi.cache_exists(key):
            result = json.loads(uwsgi.cache_get(key))
        else:
            getcaps = uwsgi.cache_get("himawari8")
    else:
        res = requests.get("{}{}".format(baseUrl,FIREWATCH_GETCAPS),verify=FIREWATCH_HTTPS_VERIFY)
        res.raise_for_status()
        getcaps = res.content
        getcaps = getcaps.decode("utf-8")
        uwsgi.cache_set("himawari8", getcaps, 60*10)  # cache for 10 mins

    if not result:
        layernames = re.findall(r"\w+HI8\w+{}\.\w+".format(target), getcaps)
        layers = []
        for layer in layernames:
            layers.append([settings.PERTH_TIMEZONE.localize(datetime.datetime.strptime(re.findall(r"\w+_(\d+)_\w+", layer)[0], "%Y%m%d%H%M")), layer])
        layers = sorted(layers,key=lambda layer:layer[0])
        for layer in layers:
            layer[0] = (layer[0]).strftime("%a %b %d %Y %H:%M:%S AWST")
        result = {
            "servers": [baseUrl + FIREWATCH_SERVICE],
            "layers": layers,
            "updatetime":layers[len(layers) - 1][0]
        }
        uwsgi.cache_set(key, json.dumps(result), 60*10)  # cache for 10 mins

    if len(result["layers"]) == 0:
        return bottle.HTTPResponse(status=404)
    elif last_updatetime and last_updatetime == result["updatetime"]:
        bottle.response.status = 290
        return "{}"
    else:
        return result
Code example #33
File: server_jsonrpc.py Project: yakkobr/gbix
def healthcheck():
    fe_status_cached = cache_get('gbix_healthcheck',
                                 cache_options['default']['name'])

    if fe_status_cached:
        return fe_status_cached

    else:
        try:
            fe_status = fowardZ.sendRequest('{"jsonrpc": "2.0", \
                                        "method": "apiinfo.version", \
                                        "params": [], \
                                        "id": 1 \
                                        }')
        except:
            status_string = 'FAILED ' + socket.gethostname(
            ) + ' Sem conexao com o FE do Zabbix'
            cache_set('gbix_healthcheck', status_string, 3,
                      cache_options['default']['name'])
            return status_string

        if 'result' in fe_status:
            status_string = 'WORKING ' + socket.gethostname()
            cache_set('gbix_healthcheck', status_string, 5,
                      cache_options['default']['name'])
            return status_string
        else:
            status_string = 'FAILED ' + socket.gethostname(
            ) + 'Sem conexao com o FE do Zabbix'
            cache_set('gbix_healthcheck', status_string, 3,
                      cache_options['default']['name'])
            return status_string
Code example #34
File: server_jsonrpc.py Project: globocom/gbix
def healthcheck():
    fe_status_cached = cache_get('gbix_healthcheck', cache_options['default']['name'])

    if fe_status_cached:
        return fe_status_cached

    else:
        try:
            fe_status = fowardZ.sendRequest('{"jsonrpc": "2.0", \
                                        "method": "apiinfo.version", \
                                        "params": [], \
                                        "id": 1 \
                                        }')
        except:
            status_string = 'FAILED ' + socket.gethostname() + ' Sem conexao com o FE do Zabbix'
            cache_set('gbix_healthcheck', status_string, 3, cache_options['default']['name'])
            return status_string

        if 'result' in fe_status:
            status_string = 'WORKING ' + socket.gethostname()
            cache_set('gbix_healthcheck', status_string, 5, cache_options['default']['name'])
            return status_string
        else:
            status_string = 'FAILED ' + socket.gethostname() + 'Sem conexao com o FE do Zabbix'
            cache_set('gbix_healthcheck', status_string, 3, cache_options['default']['name'])
            return status_string
Code example #35
File: rpmcache.py Project: UweMenges/rpmcache
def get_url(url):
    """Download a file from url to cache_dir."""
    # set a lock to prevent multiple simultaneous downloads of the same file
    mypid = os.getpid()
    uwsgi.lock()
    otherpid = uwsgi.cache_get(url)
    if otherpid:
        uwsgi.unlock()
        while otherpid:
            log('D: [%d] waiting for pid %s to download %s' %
                (mypid, otherpid, url))
            time.sleep(1)
            otherpid = uwsgi.cache_get(url)
        return 200
    else:
        uwsgi.cache_set(url, str(mypid))
        uwsgi.unlock()

    dest = localfile(url)
    log('D: [%d] downloading %s to %s' % (mypid, url, dest))
    curl = pycurl.Curl()
    curl.setopt(curl.URL, url)
    curl.setopt(curl.FOLLOWLOCATION, True)
    path = '/'.join(dest.split('/')[:-1])
    if not os.path.exists(path):
        # parallel download of rpms in subdir will create it right now
        try:
            os.makedirs(path)
        except OSError as e:
            # this catches duplicate creation (so just W not E)
            # TODO: need to bypass the open() on real errors
            # like permissions
            log('W: [%d] OS error(%d): %s' %
                (mypid, e.errno, e.strerror))
    with open(dest, 'wb') as fil:
        curl.setopt(curl.WRITEFUNCTION, fil.write)
        curl.perform()
    uwsgi.cache_del(url)
    return curl.getinfo(curl.HTTP_CODE)
Code example #36
def create_tests():
    """Execute tests upon successful POST."""
    payload = request.get_json()
    if payload is None:
        return make_response(
            jsonify({"error": "Valid request payload not found."}), 400)
    test_count = 0
    for tests in payload.values():
        test_count += len(tests)
    if test_count > CONFIG["max_test_count"]:
        return make_response(
            jsonify({
                "error":
                f"Provided number of tests is too high. Max: {CONFIG['max_test_count']}"
            }),
            400,
        )
    # Generate the client's receipt and pass the test payload to a background thread to be executed.
    receipt = token_hex(16)
    uwsgi.cache_set(receipt, "{}", 600, "receipts")
    execute_tests(receipt, payload, CONFIG["max_process_count"])
    return jsonify({"receipt": receipt})
Code example #37
 def test_big_delete(self):
     self.assertTrue(uwsgi.cache_set('key1', 'X' * 50 , 0, 'items_4_10'))
     self.assertTrue(uwsgi.cache_del('key1', 'items_4_10'))
     self.assertTrue(uwsgi.cache_set('key1', 'HELLOHELLO', 0, 'items_4_10'))
     self.assertTrue(uwsgi.cache_set('key2', 'HELLOHELLO', 0, 'items_4_10'))
     self.assertTrue(uwsgi.cache_set('key3', 'HELLOHELLO', 0, 'items_4_10'))
     self.assertTrue(uwsgi.cache_set('key4', 'HELLOHELLO', 0, 'items_4_10'))
     self.assertIsNone(uwsgi.cache_set('key5', 'HELLOHELLO', 0, 'items_4_10'))
Code example #38
File: rpmcache.py Project: UweMenges/rpmcache
def get_url(url):
    """Download a file from url to cache_dir."""
    # set a lock to prevent multiple simultaneous downloads of the same
    # file
    mypid = os.getpid()
    uwsgi.lock()
    otherpid = uwsgi.cache_get(url)
    if otherpid:
        uwsgi.unlock()
        while otherpid:
            log('D: pid %d waiting for pid %s to download %s' %
                (mypid, otherpid, url))
            time.sleep(1)
            otherpid = uwsgi.cache_get(url)
        return 200
    else:
        uwsgi.cache_set(url, str(mypid))
        uwsgi.unlock()

    dest = localfile(url)
    log('D: pid %d downloading %s' % (mypid, url))
    curl = pycurl.Curl()
    curl.setopt(curl.URL, url)
    path = '/'.join(dest.split('/')[:-1])
    if not os.path.exists(path):
        # parallel download of rpms in subdir will create it right now
        try:
            os.makedirs(path)
        except OSError as e:
            # this catches duplicate creation (so just W not E)
            # TODO: need to bypass the open() on real errors
            # like permissions
            log('W: OS error(%d): %s' % (e.errno, e.strerror))
    with open(dest, 'wb') as fil:
        curl.setopt(curl.WRITEFUNCTION, fil.write)
        curl.perform()
    uwsgi.cache_del(url)
    return curl.getinfo(curl.HTTP_CODE)
Code example #39
File: __init__.py Project: parksandwildlife/gokart
def bom(target):
    last_updatetime = bottle.request.query.get("updatetime")
    layeridpattern = bottle.request.query.get("layeridpattern")
    if layeridpattern:
        layeridpattern = "bom:{}".format(layeridpattern)
    else:
        layeridpattern = "bom:{}{{:0>3}}".format(target)
    current_timeline = None
    try:
        current_timeline = json.loads(uwsgi.cache_get(target))
    except:
        current_timeline = None

    bottle.response.set_header("Content-Type", "application/json")
    bottle.response.status = 200
    if current_timeline and datetime.datetime.now() - datetime.datetime.strptime(current_timeline["refreshtime"], "%a %b %d %Y %H:%M:%S") < datetime.timedelta(minutes=5):
        # data was refreshed within the last 5 minutes, use the cached result directly
        if current_timeline["updatetime"] == last_updatetime:
            # returning 304 causes a "No element found" error on the client, so return a custom status code with the same meaning as 304
            bottle.response.status = 290
            return "{}"
        else:
            return {"layers": current_timeline["layers"], "updatetime": current_timeline["updatetime"]}

    timeline = getTimelineFromWmsLayer(current_timeline, bomLayerIdFunc(layeridpattern))

    if not timeline:
        raise "Missing some of http parameters 'basetimelayer', 'timelinesize', 'layertimespan'."

    if not current_timeline or id(timeline) != id(current_timeline):
        uwsgi.cache_set(target, json.dumps(timeline), 0)

    if timeline["updatetime"] == last_updatetime:
        bottle.response.status = 290
        return "{}"
    else:
        return {"layers": timeline["layers"], "updatetime": timeline["updatetime"]}
Code example #40
 def set(self, cache, key, value):
     uwsgi.cache_set(key, value, 3600, cache)
Code example #41
def get_layermetadata(layerids, kmiserver=settings.KMI_SERVER, results={}):
    multiple_layers = True
    if isinstance(layerids, basestring):
        layerids = [layerids]
        multiple_layers = False
    #group layers against layer workspace
    layers = {}
    for layerid in layerids:
        layerid = layerid.strip()
        #check whether it is cached or not
        key = layermetadatakey(layerid)
        if uwsgi.cache_exists(key):
            try:
                metadata = uwsgi.cache_get(key)
                if metadata:
                    if layerid in results:
                        results[layerid].update(json.loads(metadata))
                    else:
                        results[layerid] = json.loads(metadata)
                    #print("Retrieve the metadata from cache for layer ({})".format(layerid))
                    continue
            except:
                pass

        layer = layerid.split(":")

        if len(layer) == 1:
            #no workspace
            layer_ws = ""
            layer = layer[0]
        else:
            layer_ws = layer[0]
            layer = layer[1]

        if layer_ws not in layers:
            layers[layer_ws] = [layer]
        else:
            layers[layer_ws].append(layer)

    if layers:
        session_cookie = settings.get_session_cookie()
        kmiserver = get_kmiserver(kmiserver)
        #find the layer's metadata
        url = None
        for layer_ws, layers in layers.iteritems():
            if layer_ws:
                url = "{}/{}/wms?service=wms&version=1.1.1&request=GetCapabilities".format(
                    kmiserver, layer_ws)
            else:
                url = "{}/wms?service=wms&version=1.1.1&request=GetCapabilities".format(
                    kmiserver)

            res = requests.get(url, verify=False, cookies=session_cookie)
            res.raise_for_status()

            tree = ET.fromstring(res.content)

            capability = tree.find('Capability')
            if not len(capability):
                raise Exception("getCapability failed")
            kmi_layers = capability.findall("Layer")
            while kmi_layers:
                kmi_layer = kmi_layers.pop()
                name = get_child_value(kmi_layer, "Name")

                if name:
                    try:
                        index = layers.index(name)
                    except:
                        index = -1
                    if index >= 0:
                        #this layer's metadata is requested by the user
                        if layer_ws:
                            layerid = "{}:{}".format(layer_ws, name)
                        else:
                            layerid = name

                        if layerid in results:
                            result = results[layerid]
                        else:
                            result = {"id": layerid}
                            results[layerid] = result

                        del layers[index]

                        result["title"] = get_child_value(kmi_layer, "Title")
                        result["abstract"] = get_child_value(
                            kmi_layer, "Abstract")
                        result["srs"] = get_child_value(kmi_layer, "SRS")
                        bbox = kmi_layer.find("LatLonBoundingBox")
                        if bbox is not None:
                            result["latlonBoundingBox"] = [
                                float(bbox.attrib["miny"]),
                                float(bbox.attrib["minx"]),
                                float(bbox.attrib["maxy"]),
                                float(bbox.attrib["maxx"])
                            ]
                        else:
                            result["latlonBoundingBox"] = None
                        for bbox in kmi_layer.findall("BoundingBox"):
                            result["latlonBoundingBox_{}".format(
                                bbox.attrib["SRS"].upper())] = [
                                    float(bbox.attrib["miny"]),
                                    float(bbox.attrib["minx"]),
                                    float(bbox.attrib["maxy"]),
                                    float(bbox.attrib["maxx"])
                                ]

                        #cache it for 6 hours
                        key = layermetadatakey(result["id"])
                        try:
                            if uwsgi.cache_exists(key):
                                uwsgi.cache_update(key, json.dumps(result),
                                                   6 * 3600)
                            else:
                                uwsgi.cache_set(key, json.dumps(result),
                                                6 * 3600)
                        except:
                            pass

                        #print("Retrieve the metadata from kmi for layer ({})".format(result["id"]))

                        if len(layers):
                            continue
                        else:
                            #already find metadata for all required layers
                            break
                sub_layers = kmi_layer.findall("Layer")
                if sub_layers:
                    kmi_layers += sub_layers

            if len(layers) == 1:
                if layer_ws:
                    raise Exception("The layer({}:{}) Not Found".format(
                        layer_ws, layers[0]))
                else:
                    raise Exception("The layer({}) Not Found".format(
                        layers[0]))
            elif len(layers) > 1:
                if layer_ws:
                    raise Exception("The layers({}) Not Found".format(",".join(
                        ["{}:{}".format(layer_ws, l) for l in layers])))
                else:
                    raise Exception("The layers({}) Not Found".format(
                        ",".join(layers)))

    if multiple_layers:
        return results
    else:
        return results[layerids[0]]
Code example #42
File: utils.py Project: GaretJax/docker-deployer
def _update_container_ip(client, container_name, cache_key):
    info = client.inspect_container(container_name)
    ip_address = info['NetworkSettings']['IPAddress']
    uwsgi.cache_set(cache_key, ip_address)
    return ip_address
Code example #43

def gen_rand_n(max_n):
    return random.randint(8, max_n)


def gen_rand_s(size):
    return ''.join([random.choice(string.letters) for i in range(size)])


print 'filling cache...'
for i in range(0, 1000):
    kl = gen_rand_n(200)
    key = gen_rand_s(kl)
    vl = gen_rand_n(10000)
    val = gen_rand_s(vl)
    items[key] = val
    uwsgi.cache_set(key, val)

print 'checking cache...'
count = 0
for key in items.keys():
    val = uwsgi.cache_get(key)
    count += 1
    if val != items[key]:
        print len(val), val
        print len(items[key]), items[key]
        raise Exception('CACHE TEST FAILED AFTER %d ITERATIONS !!!' % count)

print "TEST PASSED"
Code example #44
def get_layerdefinition(layerids, kmiserver=settings.KMI_SERVER, results={}):
    kmiserver = get_kmiserver(kmiserver)

    multiple_layers = True
    if isinstance(layerids, basestring):
        layerids = [layerids]
        multiple_layers = False
    #group layers against layer workspace
    layers = {}
    for layerid in layerids:
        layerid = layerid.strip()
        #check whether it is cached or not
        key = layerdefinitionkey(layerid)
        if uwsgi.cache_exists(key):
            try:
                definitiondata = uwsgi.cache_get(key)
                if definitiondata:
                    if layerid in results:
                        results[layerid].update(json.loads(definitiondata))
                    else:
                        results[layerid] = json.loads(definitiondata)
                    continue
            except:
                pass

        layer = layerid.split(":")

        if len(layer) == 1:
            #no workspace
            layer_ws = ""
            layer = layer[0]
        else:
            layer_ws = layer[0]
            layer = layer[1]

        if layer_ws not in layers:
            layers[layer_ws] = [layerid]
        else:
            layers[layer_ws].append(layerid)

    if layers:
        kmiserver = get_kmiserver(kmiserver)
        session_cookie = settings.get_session_cookie()

        url = None
        for layer_ws, layers in layers.iteritems():
            if layer_ws:
                url = "{}/{}/wfs?request=DescribeFeatureType&version=2.0.0&service=WFS&outputFormat=application%2Fjson&typeName=".format(
                    kmiserver, layer_ws, ",".join(layers))
            else:
                url = "{}/wfs?request=DescribeFeatureType&version=2.0.0&service=WFS&outputFormat=application%2Fjson&typeName=".format(
                    kmiserver, ",".join(layers))

            res = requests.get(url, verify=False, cookies=session_cookie)
            res.raise_for_status()
            layersdata = res.json()

            for layer in layersdata.get("featureTypes") or []:
                if layer_ws:
                    layerid = "{}:{}".format(layer_ws, layer["typeName"])
                else:
                    layerid = layer["typeName"]
                try:
                    index = layers.index(layerid)
                except:
                    index = -1
                if index >= 0:
                    #this layer's metadata is requested by the user
                    if layerid in results:
                        result = results[layerid]
                    else:
                        result = {"id": layerid}
                        results[layerid] = result

                    result["properties"] = layer["properties"]
                    result["geometry_property"] = None
                    result["geometry_properties"] = []
                    result["geometry_type"] = None
                    result["geometry_property_msg"] = None

                    del layers[index]

                    #find spatial columns
                    for prop in layer["properties"]:
                        if prop["type"].startswith("gml:"):
                            #spatial column
                            result["geometry_properties"].append(prop)

                    if len(result["geometry_properties"]) == 1:
                        result["geometry_property"] = result[
                            "geometry_properties"][0]
                        result["geometry_type"] = result[
                            "geometry_properties"][0]["localType"].lower()
                    elif len(result["geometry_properties"]) > 1:
                        #has more than one geometry property, try to find the right one
                        if layer_ws:
                            url = "{}/{}/ows?service=WFS&version=2.0.0&request=GetFeature&typeName={}&count=1&outputFormat=application%2Fjson".format(
                                kmiserver, layer_ws, layerid)
                        else:
                            url = "{}/ows?service=WFS&version=2.0.0&request=GetFeature&typeName={}&count=1&outputFormat=application%2Fjson".format(
                                kmiserver, layerid)

                        res = requests.get(url,
                                           verify=False,
                                           cookies=session_cookie)
                        res.raise_for_status()
                        featuresdata = res.json()
                        if len(featuresdata["features"]) > 0:
                            feat = featuresdata["features"][0]
                            for prop in result["geometry_properties"]:
                                if prop["name"] == feat["geometry_name"]:
                                    result["geometry_property"] = prop
                                    result["geometry_type"] = prop[
                                        "localType"].lower()
                                    break

                        if not result["geometry_property"]:
                            result[
                                "geometry_property_msg"] = "Layer '{}' has more than one geometry columns, can't identity which column is used as the geometry column.".format(
                                    layerid)
                    else:
                        result[
                            "geometry_property_msg"] = "Layer '{}' is not a spatial layer".format(
                                layerid)

                    if result["geometry_property"]:
                        #found the geometry property, remove it from properties
                        index = len(result["properties"]) - 1
                        while index >= 0:
                            if result["properties"][index] == result[
                                    "geometry_property"]:
                                #this is the geometry property,remove it from properties
                                del result["properties"][index]
                                break
                            index -= 1

                    #cache it for 1 day
                    key = layerdefinitionkey(layerid)
                    try:
                        if uwsgi.cache_exists(key):
                            uwsgi.cache_update(key, json.dumps(result),
                                               24 * 3600)
                        else:
                            uwsgi.cache_set(key, json.dumps(result), 24 * 3600)
                    except:
                        pass

        if len(layers) == 1:
            if layer_ws:
                raise Exception("The layer({}:{}) Not Found".format(
                    layer_ws, layers[0]))
            else:
                raise Exception("The layer({}) Not Found".format(layers[0]))
        elif len(layers) > 1:
            if layer_ws:
                raise Exception("The layers({}) Not Found".format(",".join(
                    ["{}:{}".format(layer_ws, l) for l in layers])))
            else:
                raise Exception("The layers({}) Not Found".format(
                    ",".join(layers)))

    if multiple_layers:
        return results
    else:
        return results[layerids[0]]
Code example #45
File: hello_world.py Project: rebx/uwsgi
import uwsgi
if uwsgi.loop == 'gevent':
    import gevent

print uwsgi.version
print uwsgi.workers()
try:
    uwsgi.cache_set('foo', "Hello World from cache")
except:
    pass
def application(env, start_response):
    if uwsgi.loop == 'gevent':
        gevent.sleep()
    start_response('200 OK', [('Content-Type', 'text/html')])
    yield "foobar<br/>"
    if uwsgi.loop == 'gevent':
        gevent.sleep(10)
    yield str(env['wsgi.input'].fileno())
    yield "<h1>Hello World</h1>"
    try:
        yield uwsgi.cache_get('foo')
    except:
        pass
Code example #46
File: cachebitmap.py Project: CommerceRack/uwsgi
 def test_non_bitmap(self):
     self.assertTrue(uwsgi.cache_set('KEY', 'X' * 20, 0, 'items_non_bitmap'))
     self.assertTrue(uwsgi.cache_del('KEY', 'items_non_bitmap'))
     self.assertIsNone(uwsgi.cache_set('KEY', 'X' * 21, 0, 'items_non_bitmap'))
     self.assertTrue(uwsgi.cache_set('KEY', 'X' * 20, 0, 'items_non_bitmap'))
Code example #47
 def test_big_key(self):
     self.assertTrue(uwsgi.cache_set('K' * 2048, 'X' * 50 , 0, 'items_4_10'))
     self.assertIsNone(uwsgi.cache_set('K' * 2049, 'X' * 50 , 0, 'items_4_10'))
Code example #48
File: app.py Project: pombredanne/middleman
def _cache_set_token(token, tenant_id):
    uwsgi.cache_set(token, tenant_id, _CONFIG.cache.ttl, _CONFIG.cache.cache_name)
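A lookup counterpart is not included in the snippet; a minimal sketch under the same configuration (_CONFIG.cache.cache_name as above; _cache_get_token is a hypothetical helper):

def _cache_get_token(token):
    # returns the cached tenant id as bytes, or None if the token is unknown or expired
    return uwsgi.cache_get(token, _CONFIG.cache.cache_name)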
Code example #49
File: __init__.py Project: dbca-wa/gokart-lite
def _get_profile(app):
    #get app profile
    profile = None
    appPath = os.path.join(DIST_PATH,"{}.js".format(app))
    if not os.path.exists(appPath):
        appPath = os.path.join(DIST_PATH,"sss.js")

    key = "{}_profile".format(app)
    profileChanged = False
    
    if uwsgi.cache_exists(key):
        profile = uwsgi.cache_get(key)
    
    if profile:
        profile = json.loads(profile)
        if repr(os.path.getmtime(appPath)) != profile["mtime"] or os.path.getsize(appPath) != profile["size"]:
            profileChanged = True
            profile = None

    if not profile:
        file_data = None
        with open(appPath,"rb") as f:
            file_data = f.read()
        m = profile_re.search(file_data)
        profile = m.group("profile") if m else "{}"
        profile = {
            'mtime':repr(os.path.getmtime(appPath)),
            'size':os.path.getsize(appPath),
            'profile':demjson.decode(profile)
        }
        m = hashlib.md5()
        m.update(file_data)
        profile['profile']['build']['md5'] = base64.urlsafe_b64encode(m.digest()).rstrip("=")
        file_data = None
        if profileChanged:
            uwsgi.cache_update(key, json.dumps(profile))
        else:
            uwsgi.cache_set(key, json.dumps(profile))

    profile["profile"]["dependents"] = {}
    #get vendor md5
    vendorPath = os.path.join(DIST_PATH,"vendor.js")
    if not os.path.exists(vendorPath):
        raise Exception("Vendor library not found")
    key = "{}_profile".format("vendor")

    profileChanged = False
    vendorProfile = None
    if uwsgi.cache_exists(key):
        vendorProfile = uwsgi.cache_get(key)
    
    if vendorProfile:
        vendorProfile = json.loads(vendorProfile)
        if repr(os.path.getmtime(vendorPath)) != vendorProfile["mtime"] or os.path.getsize(vendorPath) != vendorProfile["size"]:
            profileChanged = True
            vendorProfile = None

    if not vendorProfile:
        m = hashlib.md5()
        with open(vendorPath,"rb") as f:
            m.update(f.read())
        vendorProfile = {
            'mtime':repr(os.path.getmtime(vendorPath)),
            'size':os.path.getsize(vendorPath),
            'vendorMD5':base64.urlsafe_b64encode(m.digest()).rstrip("=")
        }
        if profileChanged:
            uwsgi.cache_update(key, json.dumps(vendorProfile))
        else:
            uwsgi.cache_set(key, json.dumps(vendorProfile))

    profile["profile"]["dependents"]["vendorMD5"] = vendorProfile["vendorMD5"]

    #get env profile
    envPath = os.path.join(BASE_DIST_PATH,'release','static','js',"{}-{}.env.js".format(app,ENV_TYPE))
    if not os.path.exists(envPath):
        raise Exception("'{}-{}.env.js' is missing.".format(app,ENV_TYPE))
    else:
        key = "{}_{}_profile".format("env",ENV_TYPE)
        profileChanged = False

        envProfile = None
        if uwsgi.cache_exists(key):
            envProfile = uwsgi.cache_get(key)
    
        if envProfile:
            envProfile = json.loads(envProfile)
            if repr(os.path.getmtime(envPath)) != envProfile["mtime"] or os.path.getsize(envPath) != envProfile["size"]:
                profileChanged = True
                envProfile = None

        if not envProfile:
            m = hashlib.md5()
            with open(envPath,"rb") as f:
                m.update(f.read())
            envProfile = {
                'mtime':repr(os.path.getmtime(envPath)),
                'size':os.path.getsize(envPath),
                'envMD5':base64.urlsafe_b64encode(m.digest()).rstrip("=")
            }
            if profileChanged:
                uwsgi.cache_update(key, json.dumps(envProfile))
            else:
                uwsgi.cache_set(key, json.dumps(envProfile))

        profile["profile"]["dependents"]["envMD5"] = envProfile["envMD5"]
        profile["profile"]["envType"] = ENV_TYPE

    #get style profile
    stylePath = os.path.join(BASE_DIST_PATH,'release','static','css',"style.css")
    if not os.path.exists(stylePath):
        raise Exception("'style.css' is missing.")
    else:
        key = "style_profile"
        profileChanged = False

        styleProfile = None
        if uwsgi.cache_exists(key):
            styleProfile = uwsgi.cache_get(key)
    
        if styleProfile:
            styleProfile = json.loads(styleProfile)
            if repr(os.path.getmtime(stylePath)) != styleProfile["mtime"] or os.path.getsize(stylePath) != styleProfile["size"]:
                profileChanged = True
                styleProfile = None

        if not styleProfile:
            m = hashlib.md5()
            with open(stylePath,"rb") as f:
                m.update(f.read())
            styleProfile = {
                'mtime':repr(os.path.getmtime(stylePath)),
                'size':os.path.getsize(stylePath),
                'styleMD5':base64.urlsafe_b64encode(m.digest()).rstrip("=")
            }
            if profileChanged:
                uwsgi.cache_update(key, json.dumps(styleProfile))
            else:
                uwsgi.cache_set(key, json.dumps(styleProfile))

        profile["profile"]["dependents"]["styleMD5"] = styleProfile["styleMD5"]


    return profile["profile"]
Code example #50
File: heavytest.py Project: Algy/uwsgi
import uwsgi
import werkzeug.testapp

uwsgi.cache_set("/cache/get", "HTTP 1.1 200 OK\r\nContent-Type: text/html\r\n\r\n<h1>I am the uWSGI cache</h1>")


def app001(env, start_response):
    start_response('200 OK', [('Content-Type', 'text/html')])
    return "PATH_INFO=%s" % env['PATH_INFO']


def app002(env, start_response):
    start_response('200 OK', [('Content-Type', 'text/html')])
    return "requests: %d" % uwsgi.total_requests()

uwsgi.applications = {
    '': werkzeug.testapp.test_app,
    '/app001': app001,
    '/app002': app002
}
Code example #51
File: cachebitmap.py Project: CommerceRack/uwsgi
 def test_too_much_items(self):
     self.assertTrue(uwsgi.cache_set('key1', 'HELLO', 0, 'items_4_10'))
     self.assertTrue(uwsgi.cache_set('key2', 'HELLO', 0, 'items_4_10'))
     self.assertTrue(uwsgi.cache_set('key3', 'HELLO', 0, 'items_4_10'))
     self.assertTrue(uwsgi.cache_set('key4', 'HELLO', 0, 'items_4_10'))
     self.assertIsNone(uwsgi.cache_set('key5', 'HELLO', 0, 'items_4_10'))
Code example #52
File: cachebitmap.py Project: CommerceRack/uwsgi
 def test_set(self):
     self.assertTrue(uwsgi.cache_set('key1', 'HELLO', 0, 'items_17'))
     self.assertIsNone(uwsgi.cache_set('key1', 'HELLO', 0, 'items_17'))
     self.assertTrue(uwsgi.cache_del('key1', 'items_17'))
     self.assertTrue(uwsgi.cache_set('key1', 'HELLO', 0, 'items_17'))
     self.assertIsNone(uwsgi.cache_set('key1', 'HELLO', 0, 'items_17'))
Code example #53
File: cachebitmap.py Project: CommerceRack/uwsgi
 def test_big_update(self):
     self.assertTrue(uwsgi.cache_set('key1', 'X' * 40 , 0, 'items_4_10'))
     self.assertTrue(uwsgi.cache_update('key1', 'X' * 10 , 0, 'items_4_10'))
     self.assertTrue(uwsgi.cache_del('key1', 'items_4_10'))
     self.assertIsNone(uwsgi.cache_update('key1', 'X' * 51 , 0, 'items_4_10'))
     self.assertTrue(uwsgi.cache_update('key1', 'X' * 50 , 0, 'items_4_10'))