def getCanonicalData(key):
    """Resolve *key* to the canonical Data key (lowest dataId) sharing its
    field/string pair, memoizing the answer in memcache.

    Returns None when *key* no longer resolves to an entity.
    """
    assert isinstance(key, ndb.Key)
    cache_key = "akafkljacuiudrt2po8vxdzskj" + str(key)
    cache = Client()
    hit = cache.get(cache_key)
    if hit:
        assert isinstance(hit, ndb.Key)
        return hit
    data = key.get()
    if data is None:
        return None
    assert isinstance(data, Data)
    # All keys with the same field/string pair, ordered so the oldest
    # (smallest dataId) comes first: that one is canonical.
    duplicates = Data.query().filter(Data.field == data.field).filter(
        Data.string == data.string).order(Data.dataId).fetch(keys_only=True)
    assert len(duplicates) > 0
    canonical_key = duplicates[0]
    canonical = canonical_key.get()
    assert data.dataId >= canonical.dataId
    assert data.field == canonical.field
    assert data.string == canonical.string
    cache.set(cache_key, canonical_key)
    return canonical_key
def getKeyByFieldAndStringFromMemcache(cls, field, string):
    """Return the cached db.Key for a (field, string) pair, or None on miss."""
    assert isinstance(field, unicode)
    assert isinstance(string, unicode)
    cached_key = Client().get(field + string, namespace="DataKeyByFieldAndString")
    assert cached_key is None or isinstance(cached_key, db.Key)
    return cached_key
def getCanonicalData(key):
    """Map *key* to the canonical (oldest) Data key sharing its field/string
    pair; the mapping is memoized in memcache.
    """
    assert isinstance(key, ndb.Key)
    memcache_key = "akafkljacuiudrt2po8vxdzskj" + str(key)
    memcache_client = Client()
    cached = memcache_client.get(memcache_key)
    if cached:
        assert isinstance(cached, ndb.Key)
        return cached
    data = key.get()
    if data is None:
        return None
    assert isinstance(data, Data)
    query = Data.query().filter(Data.field == data.field)
    query = query.filter(Data.string == data.string)
    # Oldest entity (lowest dataId) is the canonical one.
    matching_keys = query.order(Data.dataId).fetch(keys_only=True)
    assert len(matching_keys) > 0
    canonical_key = matching_keys[0]
    canonical = canonical_key.get()
    assert data.dataId >= canonical.dataId
    assert data.field == canonical.field
    assert data.string == canonical.string
    memcache_client.set(memcache_key, canonical_key)
    return canonical_key
def fetch_gravatar_url(self, gravatar_url):
    """Return the response for *gravatar_url*, serving from memcache when
    possible and caching fresh fetches for one hour."""
    cache = Client()
    cached = cache.get(gravatar_url)
    if cached:
        return cached
    fresh = fetch(gravatar_url)
    cache.set(gravatar_url, fresh, time=3600)
    return fresh
def fetch_gravatar_url(self, gravatar_url):
    """Fetch *gravatar_url*, memoized in memcache for 3600 seconds."""
    client = Client()
    response = client.get(gravatar_url)
    if response:
        return response
    response = fetch(gravatar_url)
    client.set(gravatar_url, response, time=3600)
    return response
def fetchByFieldAndString(cls, field, string):
    """Return the datastore keys matching (field, string), via memcache."""
    assert isinstance(field, unicode)
    assert isinstance(string, unicode)
    cache = Client()
    cache_key = cls._getMemcacheKeyByFieldAndString(field, string)
    cached_keys = cache.get(cache_key)
    if cached_keys:
        return cached_keys
    keys = cls.queryByFieldAndString(field, string).fetch(keys_only=True)
    if len(keys) >= 2:
        # More than one entity for the same pair means the data is duplicated.
        warn("duplicated data entities with field=%s and string=%s" % (field, string))
    cache.set(cache_key, keys)
    return keys
def _getDataListByArduinoId(arduino_id):
    """Return the (memcached) Data keys whose 'arduinoid' field equals *arduino_id*."""
    arduino_id = unicode(arduino_id)
    cache_key = "kjasnbargasenanviajfiafjjoi" + arduino_id
    cache = Client()
    cached = cache.get(cache_key)
    # Anything but a list (including a cache miss) forces a fresh query.
    if isinstance(cached, list):
        return cached
    keys = Data.queryByFieldAndString("arduinoid", arduino_id).fetch(keys_only=True)
    cache.set(cache_key, keys)
    return keys
def tearDown(self):
    """Delete every Data entity, reset memcache CAS state and stop the testbed."""
    entities = Data.query().fetch()
    self.assertIsInstance(entities, list)
    for entity in entities:
        if entity is None:
            continue
        self.assertIsInstance(entity, Data)
        entity.key.delete()
    Client().cas_reset()
    self.testbed.deactivate()
def fetchByFieldAndString(cls, field, string):
    """Fetch the Data keys for (field, string), caching the result in memcache."""
    assert isinstance(field, unicode)
    assert isinstance(string, unicode)
    memcache_key = "kml87wfasfp98uw45nvljkbbjlkq4" + field + "nvnjqlagzahk" + string
    client = Client()
    cached = client.get(memcache_key)
    if cached:
        return cached
    data_keys = cls.queryByFieldAndString(field, string).fetch(keys_only=True)
    if len(data_keys) >= 2:
        warn("duplicated data entities with field=%s and string=%s" % (field, string))
    client.set(memcache_key, data_keys)
    return data_keys
def getEntityByKey(cls, key):
    """Return the Data entity for *key*, consulting memcache first.

    Returns None when the key does not resolve to an entity. The fetched
    entity is written back to memcache before being returned.
    """
    assert isinstance(key, db.Key)
    client = Client()
    entity = client.get(str(key), namespace="DataEntityByKey")
    if entity:
        return entity
    entities = cls.get([key])
    assert isinstance(entities, list)
    if len(entities) == 0:
        return None
    entity = entities[0]
    # BUG FIX: db.get() returns None as a placeholder for a missing entity;
    # previously that None reached the isinstance assert / memcache put.
    if entity is None:
        return None
    assert isinstance(entity, Data)
    entity.putEntityToMemcache()
    return entity
def getEntityByKey(cls, key):
    """Return the Data entity for *key* (memcache first), or None if absent."""
    assert isinstance(key, db.Key)
    cache = Client()
    cached = cache.get(str(key), namespace="DataEntityByKey")
    if cached:
        return cached
    results = cls.get([key])
    assert isinstance(results, list)
    if not results:
        return None
    entity = results[0]
    if entity is None:
        # db.get() reports a missing entity as a None placeholder.
        return None
    assert isinstance(entity, Data)
    entity.putEntityToMemcache()
    return entity
def fetchByFieldAndString(cls, field, string):
    """Look up the keys for (field, string); memcache-backed."""
    assert isinstance(field, unicode)
    assert isinstance(string, unicode)
    client = Client()
    memcache_key = cls._getMemcacheKeyByFieldAndString(field, string)
    keys = client.get(memcache_key)
    if not keys:
        keys = cls.queryByFieldAndString(field, string).fetch(keys_only=True)
        if len(keys) >= 2:
            warn("duplicated data entities with field=%s and string=%s" % (field, string))
        client.set(memcache_key, keys)
    return keys
def app(environ, start_response):
    """WSGI app: redirect every path to FEED_PATH and serve the Atom feed there.

    The feed's last-amendment date is looked up via get_updated() and kept
    in memcache for an hour.
    """
    path = environ.get('PATH_INFO', '/')
    scheme = environ.get('wsgi.url_scheme', 'http').strip().lower()
    host = environ.get('HTTP_HOST', environ.get('SERVER_NAME', '')).strip().lower()
    urljoin = '{}://{}{}'.format
    if path != FEED_PATH:
        # Canonicalize every other path onto the feed URL.
        feed_url = urljoin(scheme, host, FEED_PATH)
        start_response('301 Moved Permanently', [
            ('Content-Type', 'text/plain'),
            ('Location', feed_url),
        ])
        return ('',)
    url = urljoin(scheme, host, path)
    cache = Client()
    updated = cache.get(MEMCACHE_KEY)
    logger.info('Cache (%r): %r', MEMCACHE_KEY, updated)
    if updated is None:
        updated = get_updated()
        cache.set(MEMCACHE_KEY, updated, 3600)  # remember for an hour
    start_response('200 OK', [('Content-Type', 'application/atom+xml')])
    return ('''\
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
<id>{url}</id>
<title>ISO 4217 — Currency codes</title>
<link rel="self" href="{url}" type="application/atom+xml" />
<link href="http://www.iso.org/iso/home/standards/currency_codes.htm" type="text/html" />
<link href="http://www.currency-iso.org/dam/downloads/lists/list_one.xml" type="text/xml" />
<updated>{updated:%Y-%m-%d}T00:00:00Z</updated>
<author>
<name>The Secretariat of the ISO 4217 Maintenance Agency</name>
</author>
<entry>
<id>{url}#{updated:%Y-%m-%d}</id>
<title>ISO 4217 — Amendment: {updated:%Y-%m-%d}</title>
<link href="http://www.iso.org/iso/home/standards/currency_codes.htm" type="text/html" />
<link href="http://www.currency-iso.org/dam/downloads/lists/list_one.xml" type="text/xml" />
<updated>{updated:%Y-%m-%d}T00:00:00Z</updated>
<summary>ISO 4217 — Amendment: {updated:%Y-%m-%d}</summary>
</entry>
</feed>
'''.format(updated=updated, url=url),)
class MemcacheClient(object):
    """web2py-style cache adapter over App Engine memcache.

    Values are stored as (timestamp, value) tuples under keys namespaced
    by the current application name.
    """

    client = Client()  # shared across all instances

    def __init__(self, request, default_time_expire=300):
        self.request = request
        self.default_time_expire = default_time_expire

    def initialize(self):
        pass

    def __call__(
        self,
        key,
        f,
        time_expire=None,
    ):
        """Return the cached value for *key*, or compute it with f() and cache it.

        time_expire=0 bypasses the cache entirely; None uses the default TTL.
        When f is None only the cache lookup is attempted.
        """
        if time_expire is None:
            time_expire = self.default_time_expire
        key = '%s/%s' % (self.request.application, key)
        value = None
        obj = self.client.get(key) if time_expire != 0 else None
        if obj:
            value = obj[1]
        elif f is not None:
            value = f()
            self.client.set(key, (time.time(), value), time=time_expire)
        return value

    def increment(self, key, value=1):
        """Add *value* to the cached counter and store the new total back."""
        key = '%s/%s' % (self.request.application, key)
        obj = self.client.get(key)
        if obj:
            value = obj[1] + value
        self.client.set(key, (time.time(), value))
        return value

    def incr(self, key, value=1):
        return self.increment(key, value)

    def clear(self, key=None):
        """Delete one namespaced key, or flush everything when key is None."""
        if key:
            key = '%s/%s' % (self.request.application, key)
            self.client.delete(key)
        else:
            self.client.flush_all()

    def delete(self, *a, **b):
        return self.client.delete(*a, **b)

    def get(self, *a, **b):
        return self.client.get(*a, **b)

    def set(self, *a, **b):
        return self.client.set(*a, **b)

    def flush_all(self, *a, **b):
        # BUG FIX: previously delegated to client.delete(), which cannot
        # flush the cache; forward to client.flush_all() as clear() does.
        return self.client.flush_all(*a, **b)
def getCanonicalData(key):
    """Resolve *key* to the canonical Data key for its field/string pair,
    memoizing the result in memcache.

    Returns None when *key* no longer resolves to an entity.
    """
    assert isinstance(key, ndb.Key)
    MEMCACHE_KEY = "akafkljacuiudrt2po8vxdzskj" + str(key)
    client = Client()
    canonical_data_key = client.get(MEMCACHE_KEY)
    if canonical_data_key:
        # Consistency with the sibling implementations: the cached value
        # must be a key, not an entity.
        assert isinstance(canonical_data_key, ndb.Key)
        return canonical_data_key
    data = key.get()
    if data is None:
        return None
    assert isinstance(data, Data)
    query = data.queryDuplication()
    canonical_data_key = query.get(keys_only=True)
    assert isinstance(canonical_data_key, ndb.Key)
    # PERF FIX: fetch the canonical entity once instead of calling
    # canonical_data_key.get() three times (three datastore reads).
    canonical_data = canonical_data_key.get()
    assert data.dataId >= canonical_data.dataId
    assert data.field == canonical_data.field
    assert data.string == canonical_data.string
    client.set(MEMCACHE_KEY, canonical_data_key)
    return canonical_data_key
def serve_asset_image(self, image_filename):
    """Serve a square-resized PNG of a bundled asset, cached for a day."""
    size = self.qs['size']
    cache = Client()
    cache_key = 'serve_asset_image-%s-%s' % (image_filename, size)
    png = cache.get(cache_key)
    if not png:
        raw = open('assets/' + image_filename).read()
        image = images.Image(raw)
        image.resize(width=size, height=size)
        png = image.execute_transforms(output_encoding=images.PNG)
        cache.set(cache_key, png, time=86400)
    self.response.headers['Content-Type'] = 'image/png'
    self.response.out.write(png)
    return
def serve_asset_image(self, image_filename):
    """Write a PNG rendition of assets/<image_filename> resized to the
    requested square size; the encoded bytes are memcached for 24 hours."""
    size = self.qs['size']
    memcache_client = Client()
    key = 'serve_asset_image-%s-%s' % (image_filename, size)
    content = memcache_client.get(key)
    if content:
        self.response.headers['Content-Type'] = 'image/png'
        self.response.out.write(content)
        return
    img = images.Image(open('assets/' + image_filename).read())
    img.resize(width=size, height=size)
    content = img.execute_transforms(output_encoding=images.PNG)
    memcache_client.set(key, content, time=86400)
    self.response.headers['Content-Type'] = 'image/png'
    self.response.out.write(content)
    return
def getByDataId(cls, data_id):
    """Return the Data entity with the given dataId.

    Checks memcache first; raises EntityNotFound when no entity matches
    and warns when more than one does.
    """
    assert isinstance(data_id, int)
    cache = Client()
    cached = cache.get(cls._getMemcacheKeyByDataId(data_id))
    if cached:
        return cached
    query = ndb.Query(kind="Data").filter(cls.dataId == data_id)
    # limit=2 is enough to detect duplicates without fetching everything.
    keys = query.fetch(keys_only=True, limit=2)
    if keys is None:
        return
    if not keys:
        raise EntityNotFound("Data", {"dataId": data_id})
    if len(keys) > 1:
        warn("%s Data entities with dataId %s were found" % (len(keys), data_id),
             RuntimeWarning)
    entity = keys[0].get()
    assert isinstance(entity, Data)
    cache.set(cls._getMemcacheKeyByDataId(data_id), entity)
    return entity
def __init__(self, *args, **kwargs):
    """Wire the parent request class to memcache and the Foursquare OAuth consumer."""
    from django.conf import settings
    # NOTE(review): super(self.__class__, ...) recurses if this class is ever
    # subclassed; consider naming the class explicitly.
    super(self.__class__, self).__init__(cache=Client(), timeout=20, *args, **kwargs)
    foursquare_conf = settings.OAUTH['foursquare']
    self.consumer = Consumer(foursquare_conf['app_key'], foursquare_conf['app_secret'])
class MemcacheClient(object):
    """web2py-style cache adapter storing (timestamp, value) tuples in
    App Engine memcache, keyed per application."""

    client = Client()  # shared across all instances

    def __init__(self, request):
        self.request = request

    def __call__(
        self,
        key,
        f,
        time_expire=300,
    ):
        """Return the cached value for *key* if younger than *time_expire*
        seconds, otherwise recompute via f() (or invalidate when f is None)."""
        key = '%s/%s' % (self.request.application, key)
        dt = time_expire
        value = None
        obj = self.client.get(key)
        if obj and (dt is None or obj[0] > time.time() - dt):
            value = obj[1]
        elif f is None:
            # No recompute function: drop any stale entry.
            if obj:
                self.client.delete(key)
        else:
            value = f()
            self.client.set(key, (time.time(), value))
        return value

    def increment(self, key, value=1):
        """Add *value* to the cached counter and store the new total back."""
        key = '%s/%s' % (self.request.application, key)
        obj = self.client.get(key)
        if obj:
            value = obj[1] + value
        self.client.set(key, (time.time(), value))
        return value

    def incr(self, key, value=1):
        return self.increment(key, value)

    def clear(self, key=None):
        """Delete one namespaced key, or flush everything when key is None."""
        if key:
            key = '%s/%s' % (self.request.application, key)
            self.client.delete(key)
        else:
            self.client.flush_all()

    def delete(self, *a, **b):
        return self.client.delete(*a, **b)

    def get(self, *a, **b):
        # BUG FIX: previously forwarded to client.delete(), so every get()
        # silently removed the entry instead of returning it.
        return self.client.get(*a, **b)

    def set(self, *a, **b):
        # BUG FIX: previously forwarded to client.delete() (copy-paste error).
        return self.client.set(*a, **b)

    def flush_all(self, *a, **b):
        # BUG FIX: previously forwarded to client.delete(); clear() shows
        # the intended flush_all() call.
        return self.client.flush_all(*a, **b)
def getByDataId(cls, data_id):
    """Fetch the Data entity whose dataId equals *data_id* (memcache-first).

    Raises EntityNotFound when nothing matches; warns on duplicates.
    """
    assert isinstance(data_id, int)
    memcache_key = cls._getMemcacheKeyByDataId(data_id)
    client = Client()
    cached = client.get(memcache_key)
    if cached:
        return cached
    query = ndb.Query(kind="Data")
    query = query.filter(cls.dataId == data_id)
    matches = query.fetch(keys_only=True, limit=2)
    if matches is None:
        return
    if len(matches) == 0:
        raise EntityNotFound("Data", {"dataId": data_id})
    if len(matches) > 1:
        warn("%s Data entities with dataId %s were found" % (len(matches), data_id),
             RuntimeWarning)
    data = matches[0].get()
    assert isinstance(data, Data)
    client.set(memcache_key, data)
    return data
from google.appengine.ext import ndb
from google.appengine.api.memcache import Client
import logging

dumbmemcache = Client()


class Post(ndb.Model):
    """A media post; comment counts are memoized in memcache."""
    title = ndb.StringProperty()
    media_url = ndb.StringProperty()
    likes = ndb.IntegerProperty(default=0)
    date = ndb.DateTimeProperty(auto_now_add=True)
    media_key = ndb.BlobKeyProperty()

    def get_comments(self):
        """Return a query for the comments attached to this post."""
        return Comment.query(Comment.post == self.key)

    def comment_count(self):
        """Return the number of comments, cached in memcache per post."""
        cache_key = '{}_comment_count'.format(self.key.id())
        count = dumbmemcache.get(cache_key)
        if not count:
            count = self.get_comments().count()
            # BUG FIX: set() previously used the literal '{}_comment_count'
            # (missing .format), so the value was written under a single
            # shared key and never matched the per-post get() above.
            dumbmemcache.set(cache_key, count)
        return count

    def get_absolute_url(self):
        return "/post/" + str(self.key.id())

    @classmethod
    def _post_delete_hook(cls, key, future):
        """When a post is deleted, delete its comments asynchronously."""
        comment_keys = Comment.query(Comment.post == key).fetch(keys_only=True)
        ndb.delete_multi_async(comment_keys)
## if NOT running on Google App Engine use SQLite or other DB db = DAL('sqlite://storage.sqlite') # db = DAL('sqlite://storage.sqlite',lazy_tables=True) to be tested else: ## connect to Google BigTable (optional 'google:datastore://namespace') db = DAL('google:datastore+ndb', lazy_tables=True) # lets try new one below #db = DAL('google:datastore', lazy_tables=True) # lets try new one below #db = DAL('google:datastore+ndb') ## store sessions and tickets there #session.connect(request, response, db=db) # session.connect(request, response, db=db) ## or store session in Memcache, Redis, etc. from gluon.contrib.memdb import MEMDB from google.appengine.api.memcache import Client session.connect(request, response, db=MEMDB(Client())) ## by default give a view/generic.extension to all actions from localhost ## none otherwise. a pattern can be 'controller/function.extension' #response.generic_patterns = ['*'] if request.is_local else [] response.generic_patterns = ['*'] response.formstyle = 'bootstrap3_inline' # or 'bootstrap3_stacked' #response.formstyle = 'bootstrap3_stacked' ## (optional) optimize handling of static files # response.optimize_css = 'concat,minify,inline' # response.optimize_js = 'concat,minify,inline' import os from gluon.tools import Auth, Crud, Service, PluginManager, prettydate, Mail auth = Auth(db, hmac_key=Auth.get_or_create_key())
def deleteEntity(self):
    """Remove this entity from the datastore and purge its memcache entries."""
    cache = Client()
    cache.delete(self._getMemcacheKeyByFieldAndString(self.field, self.string))
    cache.delete(self._getMemcacheKeyByDataId(self.dataId))
    self.key.delete()
def deleteEntityFromMemecache(self):
    """Drop this entity's cached copy from the "DataEntityByKey" namespace.

    (Method name keeps its historical spelling for caller compatibility.)
    """
    Client().delete(str(self.key()), namespace="DataEntityByKey")
def putEntityToMemcache(self):
    """Cache this entity under its field+string pair and under its key."""
    cache = Client()
    # NOTE(review): the namespace below reads "DataByFeldAndString" — looks
    # like a typo for "DataByFieldAndString"; confirm readers use the same
    # spelling before changing this runtime string.
    cache.add(self.field + self.string, self, namespace="DataByFeldAndString")
    cache.add(str(self.key()), self, namespace="DataEntityByKey")
from collections import defaultdict from datetime import datetime, timedelta import logging from random import getrandbits from google.appengine.ext import ndb, blobstore from google.appengine.api.memcache import Client from google.appengine.api.images import Image, JPEG, get_serving_url, delete_serving_url import cloudstorage as gcs bbs_memcache = Client() class Location(ndb.Model): """ Location in the galaxy to depart or arrive to: Planet, Star, City """ name = ndb.StringProperty(required=True) parent_location = ndb.KeyProperty() all_cache_key = 'all_locations' @classmethod def save_from_request(cls, request): location = cls(id=request.get('name'), name=request.get('name')) parent = request.get('parent') if parent: # in case we ever pass a parent location location.parent_location = ndb.Key(cls, parent) location.put() @classmethod def get_all(cls):
# Replace this value with the Client ID value from your project, # the same numeric value you used in client_secrets.json BILLING_PROJECT_ID = "475473128136" DATA_PROJECT_ID = "publicdata" DATASET = "samples" TABLE = "natality" QUERY = """ select state, SUM(gestation_weeks) / COUNT(gestation_weeks) as weeks from publicdata:samples.natality where year > 1990 and year < 2005 and IS_EXPLICITLY_DEFINED(gestation_weeks) group by state order by weeks """ decorator = oauth2decorator_from_clientsecrets(CLIENT_SECRETS, 'https://www.googleapis.com/auth/bigquery') mem = Client() class InfoHandler(webapp2.RequestHandler): @decorator.oauth_aware def get(self): self.response.out.write(decorator.authorize_url()) class MainHandler(webapp2.RequestHandler): def _bq4geo(self, bqdata): """geodata output for region maps must be in the format region, value. Assume the BigQuery query output is in this format and get names from schema. """ columnNameGeo = bqdata["schema"]["fields"][0]["name"] columnNameVal = bqdata["schema"]["fields"][1]["name"] #logging.info("Column Names=%s, %s" % (columnNameGeo, columnNameVal)) geodata = [];
def deleteEntity(self):
    """Delete the entity from the datastore, dropping both memcache entries first."""
    client = Client()
    for memcache_key in (
            self._getMemcacheKeyByFieldAndString(self.field, self.string),
            self._getMemcacheKeyByDataId(self.dataId)):
        client.delete(memcache_key)
    self.key.delete()
def __init__(self, *args, **kwargs):
    """Initialise the parent with a memcache backend and load the Places API key."""
    from django.conf import settings
    # NOTE(review): super(self.__class__, ...) breaks under subclassing;
    # consider naming the class explicitly.
    super(self.__class__, self).__init__(cache=Client(), *args, **kwargs)
    self.key = settings.API['google_places']
def __init__(self, request):
    """Remember the current request and initialise the memcache Client base."""
    self.request = request
    # Explicit base-class initialisation (old-style call kept as-is).
    Client.__init__(self)
def __init__(self, *args, **kwargs):
    """Create an OSMRequest backed by memcache with a 20-second timeout."""
    super(OSMRequest, self).__init__(cache=Client(), timeout=20, *args, **kwargs)
def _cache(self):
    """Lazily create and memoize the memcache client on this instance."""
    client = getattr(self, '_client', None)
    if client is None:
        client = Client()
        self._client = client
    return client