def get_cache(self, *args, **kwargs):
    """Proxy to ``cache.get_cache`` that records the call for inspection.

    The positional/keyword arguments and the resolved namespace class
    name are stored in the module-level ``_cache_options`` dict.  When
    the requested backend raises ``TypeError`` (no implementation for
    the given options), fall back to the in-memory backend.
    """
    _cache_options['args'] = args
    _cache_options['kwargs'] = kwargs
    try:
        backend = cache.get_cache(*args, **kwargs)
        _cache_options['cls'] = backend.namespace.__class__.__name__
    except TypeError:
        _cache_options['cls'] = 'NoImplementation'
        backend = cache.get_cache(*args, type='memory', **kwargs)
    return backend
def get_cache(self, *args, **kwargs):
    """Wrap ``cache.get_cache`` and record how it was called.

    Saves the call arguments and the namespace class name into the
    module-level ``_cache_options`` dict; on ``TypeError`` (backend not
    available for these options) retries with the memory backend.
    """
    _cache_options["args"] = args
    _cache_options["kwargs"] = kwargs
    try:
        namespace_cache = cache.get_cache(*args, **kwargs)
        _cache_options["cls"] = namespace_cache.namespace.__class__.__name__
    except TypeError:
        _cache_options["cls"] = "NoImplementation"
        namespace_cache = cache.get_cache(*args, type="memory", **kwargs)
    return namespace_cache
def simple(self, a):
    """Return ``"cached <a>"``, memoised per key ``a`` for one hour."""
    namespace = cache.get_cache("BasicTGController.index")
    return namespace.get_value(
        key=a,
        createfunc=lambda: "cached %s" % a,
        type="memory",
        expiretime=3600)
def expiry(self, a):
    """Inject ``a`` into the mock DB context and return the cached value.

    ``self.createfunc`` produces the value on a cache miss; it stays
    cached under the fixed key ``'test'`` for 100 seconds.
    """
    # inject a value into the context
    mockdb['expiry'] = a
    namespace = cache.get_cache("BasicTGController.index")
    return namespace.get_value(
        key='test',
        createfunc=self.createfunc,
        type="memory",
        expiretime=100)
def get_takeoff_locations(self):
    """Clustered takeoff locations of this user, cached for five minutes."""
    def compute_locations():
        # only evaluated on a cache miss or after expiry
        return Location.get_clustered_locations(
            Flight.takeoff_location_wkt,
            filter=(Flight.pilot == self.user))

    location_cache = cache.get_cache('users.takeoff_locations', expire=60 * 5)
    return location_cache.get(key=self.user.id, createfunc=compute_locations)
def get_similarity(self):
    """Pairwise similarity matrix over all submission sources.

    The matrix is expensive to compute, so it is cached under
    ``self.key`` for one week.
    """
    def build_matrix():
        log.debug('Calculating similarity matrix for key %s...', self.key)
        return all_pairs([submission.source for submission in self.submissions])

    similarity_cache = cache.get_cache('similarity')
    return similarity_cache.get_value(
        key=self.key,
        createfunc=build_matrix,
        expiretime=7 * 24 * 60 * 60)  # 7 days
def get_bestsellers(cls):
    """SKUs of the twelve best-selling active products, cached for a day.

    The SKU of each product's first configuration is used.
    """
    def _fetch_bestsellers():
        products = (cls.get_many('product', {'active': True})
                       .sort([('sold', DESCENDING)])
                       .limit(12)
                       .all())
        return [product.configurations[0].sku for product in products]

    bestselling_cache = cache.get_cache('bestselling_products')
    return bestselling_cache.get_value(
        key='bestseller',
        expiretime=24 * 3600,
        createfunc=_fetch_bestsellers)
def create_shortcut_(self, **kw):
    """Create a shortcut to a permission for a user in each given group.

    Expects in ``kw``: ``user_id`` (int), ``permission_id`` (int) and
    ``groups`` (JSON-encoded list of parent shortcut/group ids).
    Shortcuts may only be created from LIST permissions or from PROCESS
    permissions that do not require an id.  The per-user shortcut cache
    entry is invalidated for each affected group.

    Returns ``dict(status=True)`` on success, or
    ``dict(status=False, msg=...)`` when an ``EPermissions`` error occurs.
    """
    logger = logging.getLogger('PermissionsController.create_shortcut_')
    try:
        user_id = get_paramw(kw, 'user_id', int)
        permission_id = get_paramw(kw, 'permission_id', int)
        groups = get_paramw(kw, 'groups', sj.loads)

        p = dbs.query(SapnsPermission).get(permission_id)
        # Parenthesized for clarity; the original relied on 'and' binding
        # tighter than 'or' — semantics are unchanged.
        if (p.type not in [SapnsPermission.TYPE_LIST, SapnsPermission.TYPE_PROCESS] or
                (p.type == SapnsPermission.TYPE_PROCESS and p.requires_id)):
            raise EPermissions(_(u'Shortcuts can only be created from LIST and PROCESS (no required id) type permissions'))

        title = p.display_name
        if p.type == SapnsPermission.TYPE_LIST:
            title = p.class_.title

        for id_group in groups:
            group = dbs.query(SapnsShortcut).get(id_group)

            sc = SapnsShortcut()
            sc.user_id = user_id
            sc.parent_id = id_group
            sc.permission_id = permission_id
            sc.title = title
            sc.order = group.next_order()
            dbs.add(sc)
            dbs.flush()

            # invalidate the cached shortcut list for this user/group pair
            _key = '%d_%d' % (user_id, id_group)
            cache.get_cache('user_get_shortcuts').remove_value(key=_key)

        return dict(status=True)

    # Fixed legacy "except EPermissions, e:" syntax (removed in Python 3;
    # "as" is valid since Python 2.6).
    except EPermissions as e:
        logger.error(e)
        return dict(status=False, msg=unicode(e))
def stats(self, **kw):
    """Resource-usage statistics as an XML string, cached per user for 1h.

    Counts images (total and per-user), tags and graphical objects via
    ``data_service.count`` and packs them into ``<resource>``/``<tag>``
    elements.  Anonymous users skip permission checks.
    """
    log.info('stats %s' % kw)
    wpublic = kw.pop('wpublic', 0)
    anonymous = identity.anonymous()

    def usage_resource():
        log.info("CALL EXPENSIVE")
        all_count = data_service.count("image", wpublic=wpublic, images2d=True, parent=False)
        image_count = data_service.count("image", wpublic=wpublic, permcheck=not anonymous)
        tag_count = data_service.count("tag", wpublic=wpublic, permcheck=not anonymous, parent=False)
        gob_count = data_service.count("gobject", wpublic=wpublic, permcheck=not anonymous, parent=False)

        root = etree.Element('resource', uri='/usage/stats')
        for name, count in (('number_images', all_count),
                            ('number_images_user', image_count),
                            ('number_tags', tag_count),
                            ('number_gobs', gob_count)):
            etree.SubElement(root, 'tag', name=name, value=str(count))
        return etree.tostring(root)

    usage_cache = cache.get_cache("usage")
    return usage_cache.get_value(key=identity.get_username(),
                                 createfunc=usage_resource,
                                 expiretime=3600)
def all_the_vats(cls):
    """All distinct VAT rates found on any order item, cached for a month.

    Aggregates rates per order status in MongoDB, then flattens and
    sorts the union of the rate sets.
    """
    def aggregate_vats():
        pipeline = [
            {'$project': {'items': 1, 'status': 1}},
            {'$unwind': '$items'},
            {'$group': {'_id': '$status',
                        'vat_rates': {'$addToSet': '$items.rate'}}},
        ]
        per_status = DBSession.impl.db.orders.aggregate(pipeline)
        rates = set(chain.from_iterable(doc['vat_rates'] for doc in per_status))
        return sorted(rates)

    vat_cache = cache.get_cache('all_the_vats')
    return vat_cache.get_value(
        key='42',
        createfunc=aggregate_vats,
        expiretime=3600 * 24 * 30)  # one month
def uploads(self, **kw):
    """Daily image-upload counts for the last 31 days as XML, cached 10 min."""
    log.info('uploads %s' % kw)

    def fetch_counts():
        return self.get_counts('image', 31)

    usage_cache = cache.get_cache("uploads")
    counts, days = usage_cache.get_value(key=identity.get_username(),
                                         createfunc=fetch_counts,
                                         expiretime=600)

    root = etree.Element('resource', uri='/usage/uploads')
    for tag_name, joined in (('counts', ','.join(counts)),
                             ('days', ','.join(days))):
        etree.SubElement(root, 'tag', name=tag_name, value=joined)
    return etree.tostring(root)
def analysis_monthly(self, **kw):
    """Monthly analysis ('mex') counts for the last 13 months as XML, cached 1h."""
    # Fixed copy-paste bug: the message previously said 'uploads %s'
    # (copied from uploads()), mislabelling these log entries.
    log.info('analysis_monthly %s' % kw)

    def fetch_counts():
        return self.get_counts_month('mex', 13)

    usage_cache = cache.get_cache("analysis_monthly")
    counts, days = usage_cache.get_value(key=identity.get_username(),
                                         createfunc=fetch_counts,
                                         expiretime=3600)

    resource = etree.Element('resource', uri='/usage/analysis_monthly')
    etree.SubElement(resource, 'tag', name='counts', value=','.join(counts))
    etree.SubElement(resource, 'tag', name='days', value=','.join(days))
    return etree.tostring(resource)
def index(self, **kw):
    """Latest tracking fixes, each with its nearest airport and distance.

    Nearest-airport lookups are cached per track id for one hour.
    """
    na_cache = cache.get_cache('tracking.nearest_airport', expire=60 * 60)
    tracks = []
    for track in self.get_latest_fixes().all():
        def get_nearest_airport():
            # called synchronously by na_cache.get on a miss, so the
            # closure always sees the current ``track``
            airport = Airport.by_location(track.location, None)
            if airport is None:
                return None, None
            return airport, airport.distance(track.location)

        airport, distance = na_cache.get(key=track.id,
                                         createfunc=get_nearest_airport)
        tracks.append([track, airport, distance])
    return dict(tracks=tracks)
def index(self, **kw):
    """Latest tracking fixes annotated with nearest airport and distance.

    Nearest-airport lookups are cached per track id for one hour.
    """
    na_cache = cache.get_cache('tracking.nearest_airport', expire=60 * 60)

    def add_nearest_airport_data(track):
        def get_nearest_airport():
            airport = Airport.by_location(track.location, None)
            if airport is None:
                return None, None
            return airport, airport.distance(track.location)

        airport, distance = na_cache.get(key=track.id,
                                         createfunc=get_nearest_airport)
        return track, airport, distance

    tracks = [add_nearest_airport_data(fix) for fix in TrackingFix.get_latest()]
    return dict(tracks=tracks)
def get_distance_flights(self):
    """Milestone flights for this user, cached for five minutes.

    Returns a sorted list of ``[distance, flight]`` pairs: the single
    largest flight (keyed by its OLC classic distance) plus the first
    flight reaching each fixed distance milestone.
    """
    def collect_flights():
        flights = []
        largest = self.user.get_largest_flights().first()
        if largest:
            flights.append([largest.olc_classic_distance, largest])
        for milestone in [50000, 100000, 300000, 500000, 700000, 1000000]:
            milestone_flight = self.get_distance_flight(milestone)
            if milestone_flight is not None:
                flights.append([milestone, milestone_flight])
        flights.sort()
        return flights

    flight_cache = cache.get_cache('users.distance_flights', expire=60 * 5)
    return flight_cache.get(key=self.user.id, createfunc=collect_flights)
def show_event(self, event_id=None):
    """Show a single event together with its (cached) ResultsDB results.

    Redirects to the event list when no ``event_id`` is given.
    """
    if event_id is None:
        redirect(url('show_events'))

    event = (DBSession.query(db.Event)
             .filter(db.Event.id == event_id)
             .order_by(db.Event.id)
             .one())

    # As ResultsDB is probably the biggest bottle-neck, loading/preparsing
    # its data is cached.  The caching implementation can not call methods
    # with arguments, so a partial with the parameters pre-filled is used.
    preparse = partial(rdb_preparse.show_event, self.rdb, event)
    results_cache = cache.get_cache('show_event_resultsdb')
    results = results_cache.get_value(
        key="event_id_%s" % event_id,
        createfunc=preparse,
        expiretime=10)  # FIXME: make the expiretime configurable

    filled = json.loads(request.cookies.get("filled", "{}"))
    return dict(event=event, filled=filled, results=results)