Example #1
 def _get_caches(self):
     self.fbcache = cache.get_cache('fbprofile')
     self.fbfriendscache = cache.get_cache('fbfriends')
     self.fballfriendscache = cache.get_cache('fballfriends')
     # Facebook session_key_expires is not set for some reason
     #self._fbexpiration = facebook.session_key_expires - time.time()
     self._fbexpiration = 24*60*60 #24 hours
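
Most of these snippets go through the Pylons cache global, a thin wrapper around Beaker's CacheManager. A minimal standalone sketch of the same API, assuming only that Beaker is installed:

from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

# Stand-in for the pylons.cache global used throughout these examples.
cache = CacheManager(**parse_cache_config_options({
    'cache.type': 'memory',   # or 'dbm', 'file', 'memcached', ...
}))

fbcache = cache.get_cache('fbprofile')     # a namespace, created on first use
fbcache.set_value('uid:1', {'name': 'x'})
profile = fbcache.get_value('uid:1')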
Example #2
 def get(self, key, default=None):
     mycache = pylonscache.get_cache('demisauce')
     try:
         myvalue = mycache.get_value(key)
     except KeyError:
         return default
     return myvalue
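
Note the miss behaviour this wrapper papers over: with no createfunc supplied, get_value raises KeyError for an unknown key, which is what gets turned into the default here. A quick sketch:

mycache = pylonscache.get_cache('demisauce')
mycache.set_value('greeting', 'hello')
mycache.get_value('greeting')     # -> 'hello'
try:
    mycache.get_value('missing')  # no createfunc, so a miss raises KeyError
except KeyError:
    pass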
Example #3
def project_get_eager(proj):
    """Return a project eagerloaded with its scenes and libgroups
    
    ``project_get_eager`` keeps a (thread-local) cache of loaded projects,
    reloading instances from the db if the "modified" field is newer than the
    cache.
    """
    session = session_get()
    
    # get a lazyload instance of the project, save the modified time and discard
    curproject = project_get(proj)
    modified = curproject.modified
    session.expunge(curproject)
    
    # get the project from cache
    projcache = cache.get_cache('projects')
    project, cached = projcache.get_value(key=proj,
                                  createfunc=eagerload_maker(proj),
                                  expiretime=360)

    # check if it's older than the db
    if cached < modified:
        # remove the invalidated value from the cache and reload from db
        projcache.remove_value(proj)
        project, cached = projcache.get_value(key=proj,
                                  createfunc=eagerload_maker(proj),
                                  expiretime=360)
    
    # put project back into the session if necessary
    try:
        session.add(project)
    except InvalidRequestError:
        pass
    
    return project
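
Since get_value hands back whatever createfunc produced, the factory built by eagerload_maker(proj) has to bundle a creation timestamp with the project for the staleness check above. A hypothetical sketch (Project, session_get and the query options are illustrative, not from the original source):

import datetime
from sqlalchemy.orm import joinedload

def eagerload_maker(proj):
    def createfunc():
        # eager-load the project together with its scenes and libgroups
        project = session_get().query(Project).\
            options(joinedload('scenes'), joinedload('libgroups')).\
            get(proj)
        return (project, datetime.datetime.now())
    return createfunc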
Example #4
 def cached_model(self, *args, **kwargs):
     from pylons import cache
     mycache = cache.get_cache('helloworld')
     entries = mycache.get_value(key='entries',
                                 createfunc=self._get_entries,
                                 expiretime=3600)
     return dict(entries=entries)
Example #5
	def _getInDirCover(self):
		path_cache = cache.get_cache('album_path', type='memory')
		key = self.album.key()

		dirname = path_cache.get_value(key=key,
			createfunc=self.album.getPath, expiretime=60)

		if dirname is None:
			return None

		def best_image(x, y):
			pattern = '(cover|album|front)'

			if re.match(pattern, x, re.I):
				return x
			else:
				return y

		if not (fs.exists(dirname) and fs.isdir(dirname)):
			return None

		dir = fs.listdir(dirname)
		dir = filter(lambda x: x.endswith(
			('jpg', 'JPG', 'jpeg', 'JPEG')), dir)

		if len(dir) < 1:
			return None

		bestmatch = reduce(best_image, dir)
		return os.path.join(dirname, bestmatch)
Example #6
 def __init__(self, type='dbm'):
     """
     Initialise a dataset index cache
     """
     self.cache_enabled = app_globals.cache_enabled
     self.cache = cache.get_cache('DATASET_INDEX_CACHE',
                                  type=type)
Example #7
 def simple(self, a):
     c = cache.get_cache("BasicTGController.index")
     x = c.get_value(key=a, 
                     createfunc=lambda: "cached %s" % a,
                     type="memory",
                     expiretime=3600)
     return x
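
Namespace options such as type and expiretime can be passed per-call to get_value, as here, or once to get_cache, where they become the namespace defaults. A sketch of the roughly equivalent spelling:

c = cache.get_cache("BasicTGController.index", type="memory", expiretime=3600)
x = c.get_value(key=a, createfunc=lambda: "cached %s" % a)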
Example #8
 def __init__(self, dataset, type='dbm'):
     self.dataset = dataset
     opt = config.get('openspending.cache_enabled', 'True')
     self.cache_enabled = asbool(opt) and \
             not self.dataset.private
     self.cache = cache.get_cache('DSCACHE_' + dataset.name,
                                  type=type)
Example #9
    def __call__(self, environ, start_response):
        # Insert any code to be run per request here. The Routes match
        # is under environ['pylons.routes_dict'] should you want to check
        # the action or route vars here

        # Grab Domain Info
        self.domain = request.environ['REMOTE_USER']
        self.dominfo = get_domain_info(self.domain)

        # Don't allow Locked Domains to make any changes
        if self.dominfo['ispmanDomainLocked'] == 'true' and \
           request.path_info != '/locked':
            h.redirect_to('/locked')
        elif request.path_info == '/':
            h.redirect_to('/domain')

        ccache = cache.get_cache('navigation')

        c.menus = ccache.get_value('i18n_menus',
                                  createfunc=self.__create_i18n_menus,
                                  type='memory', expiretime=3600)

        c.controller = request.environ['pylons.routes_dict']['controller']
        c.action = request.environ['pylons.routes_dict']['action']

        c.imgs_list = self.__images_list()

        if 'message' in session and session['message'] != '':
            c.message = session['message']
            session['message'] = ''
            session.save()

        return WSGIController.__call__(self, environ, start_response)
Example #10
 def poll(self, global_limit, global_max_age):
     """Fetches cached updates."""
     try:
         return cache.get_cache('spline-frontpage')[self.cache_key()]
     except KeyError:
         # Haven't cached anything yet, apparently
         return []
Example #11
    def __before__(self, **kwds):

        # Load the hot tags from cache
        mycache = cache.get_cache('hot_tags')
        log.debug('before call to mycache.get_value("tags")')
        c.tags = mycache.get_value(key='tags', createfunc=Tag.popular,
                                   type="memory", expiretime=3600)

        log.debug('after call to mycache.get_value("tags")')

        c.sources = mycache.get_value(key='sources', createfunc=Feed.active_feeds,
                                    type='memory', expiretime=3600)

        log.debug('after call to mycache.get_value("sources")')

        # Pass the logo_file name to the template context
        c.logo_file = config.get('logo_file', 'logo.png')

        # Pass the site sub-title to the template context
        c.subtitle = config.get('banner_subtitle', None)

        # Set up pagination
        if self.__class__.__name__ == 'EntryController':
            if 'view' not in kwds:
                return
            c.pagesize = 25
            c.totlinks = 5000 # Probably should look this up from db and cache it...
            c.page = kwds.get('page', 0)
            try:
                c.page_numeric = long(c.page)
            except:
                abort(404)
            if c.page_numeric < 0:
                abort(404)
            c.slicestart = c.pagesize * c.page_numeric
Example #14
    def cache_content(self, key, do_work, template):
        """Argh!

        Okay, so.  Use this when you want to cache the BODY of a page but not
        the CHROME (i.e., wrapper or base or whatever).

        ``key``
            The key that uniquely identifies this particular rendering of this
            page content.

        ``do_work``
            Some function that will stuff a bunch of expensive data in c.  This
            will only be called if the page hasn't yet been cached.  It'll be
            passed the key.

        ``template``
            Name of the template to use.

        Also, DO NOT FORGET TO wrap the cacheable part of your template in a
        <%lib:cache_content> tag, or nothing will get cached!

        If a page body is pulled from cache, c.timer.from_cache will be set to
        True.  If the page had to be generated, it will be set to False.  (If
        this function wasn't involved at all, it will be set to None.)
        """

        # Content needs to be cached per-language
        key = u"{0}/{1}".format(key, c.lang)

        # Cache for...  ten hours?  Sure, whatever
        content_cache = cache.get_cache("content_cache:" + template, expiretime=36000)

        # XXX This is dumb.  Caches don't actually respect the 'enabled'
        # setting, so we gotta fake it.
        if not content_cache.nsargs.get("enabled", True):

            def skip_cache(context, mako_def):
                do_work(key)
                mako_def.body()

            c._cache_me = skip_cache
            return render(template)

        # These pages can be pretty big.  In the case of e.g. memcached, that's
        # a lot of RAM spent on giant pages that consist half of whitespace.
        # Solution: gzip everything.  Use level 1 for speed!
        def cache_me(context, mako_def):
            c.timer.from_cache = True

            def generate_page():
                c.timer.from_cache = False
                do_work(key)
                return zlib.compress(capture(context, mako_def.body).encode("utf8"), 1)

            context.write(zlib.decompress(content_cache.get_value(key=key, createfunc=generate_page)).decode("utf8"))

        c._cache_me = cache_me

        return render(template)
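
The compress-before-caching idea stands on its own; a minimal sketch, assuming any Beaker cache namespace:

import zlib

def cached_gzipped(cache_ns, key, render_body):
    def generate():
        # level 1: cheapest compression, still a big win on whitespace-heavy HTML
        return zlib.compress(render_body().encode('utf8'), 1)
    blob = cache_ns.get_value(key=key, createfunc=generate)
    return zlib.decompress(blob).decode('utf8')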
Example #15
 def expiry(self, a):
     mockdb['expiry'] = a  # inject a value into the context
     c = cache.get_cache("BasicTGController.index")
     x = c.get_value(key='test',
                     createfunc=self.createfunc,
                     type="memory",
                     expiretime=100)
     return x
Example #16
 def cache(self, id):
     '''Manual way to clear the caches'''
     if id == 'clear':
         wui_caches = ['stats']
         for cache_name in wui_caches:
             cache_ = cache.get_cache(cache_name, type='dbm')
             cache_.clear()
         return 'Cleared caches: %s' % ', '.join(wui_caches)
Example #17
 def bookmark(self, id_shortcut, **params):
     logger = logging.getLogger(__name__ + '/bookmark')
     try:
         logger.info('Bookmarking shortcut [%s]' % id_shortcut)
         user = dbs.query(SapnsUser).get(request.identity['user'].user_id)
         
         dboard = user.get_dashboard()
         dboard.add_child(id_shortcut)
         
         _key = '%d_%d' % (user.user_id, dboard.shortcut_id)
         cache.get_cache('user_get_shortcuts').remove_value(key=_key)
         
         return dict(status=True)
         
     except Exception, e:
         logger.error(e)
         return dict(status=False) #, message=str(e).decode('utf-8'))
Example #18
 def init_tree(self, key, fresh=False, **kw):
     c = cache.get_cache('feeds')
     if fresh:
         return self._get_feed_titles(fresh=fresh)
     else:
         return c.get_value(key='feed_titles',
                            createfunc=self._get_feed_titles,
                            expiretime=3600)
Example #23
    def do_cron(self, tic, *args, **kwargs):
        if tic % self.poll_frequency != 0:
            # Too early!
            return

        updates = self._poll(self.limit, self.max_age)
        if updates is not None:
            cache.get_cache('spline-frontpage')[self.cache_key()] = updates

        return
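
poll() and do_cron() above use Beaker's dict-style access instead of get_value: plain __getitem__/__setitem__ with no createfunc or expiry involved. A sketch:

frontpage = cache.get_cache('spline-frontpage')
frontpage['some-key'] = ['update1', 'update2']   # store
updates = frontpage['some-key']                  # fetch; KeyError on a miss
if 'some-key' in frontpage:                      # membership test
    del frontpage['some-key']                    # evict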
Example #24
File: page.py Project: seken/wikpy
	def save(self, page):
		namespace, key = create_cache_key(self.show, {'page':page,})
		cache.get_cache(namespace).remove(key)
		np = self._splitName(page)
		newPage = self._fetchPageRaw(page)
		if newPage is None:
			name = np[1]
			if len(name) == 0:
				name = '::'
			newPage = Page(name)
			newPage.namespace = Namespace(np[0])
			Session.add(newPage)
		newPage.text = request.POST['text']
		tags = list()
		for i in request.POST['tags'].split(','):
			if len(i) > 0:
				tags.append(Tag(i))
		Session.commit()
		redirect('/%s' % page)
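
remove is Beaker's alias for remove_value. The interesting part is create_cache_key, which rebuilds the (namespace, key) pair that Pylons' caching decorator would derive for self.show with those arguments, so saving a page evicts its stale cached rendering. The pattern in isolation (invalidate_action is a hypothetical helper):

from pylons.decorators.cache import create_cache_key

def invalidate_action(action, args_dict):
    # evict the cached rendering of a @beaker_cache-decorated action
    namespace, key = create_cache_key(action, args_dict)
    cache.get_cache(namespace).remove_value(key)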
Example #26
 def _render_from_cache(action, self, *args, **kwargs):
     context = dict(
         tmpl_context = self._py_object.tmpl_context,
         app_globals = self._py_object.config['pylons.app_globals'],
         config = self._py_object.config,
         request = self._py_object.request,
         response = self._py_object.response,
         translator = pylons.translator._current_obj(),
         session = pylons.session._current_obj(),
     )
     url = self._py_object.request.url
 
     def createfunc():
         context['url'] = routes.util.URLGenerator(context['config']['routes.map'],
                                                   context['request'].environ)
         headers_copy = {}
         for header, value in context['response'].headers.iteritems():
             headers_copy[header] = value
         for key, value in context.iteritems():
             getattr(pylons, key)._push_object(value)
         
         content = action(self, *args, **kwargs)
         
         cached_headers = {}
         for header, value in context['response'].headers.iteritems():
             if header not in headers_copy or headers_copy[header] != value:
                 cached_headers[header] = value
         log.debug('Headers Copy: %s', headers_copy)
         log.debug('Headers: %s', context['response'].headers)
         log.debug('Cached Headers: %s', cached_headers)
         for key, value in context.iteritems():
             getattr(pylons, key)._pop_object(value)
         return (cached_headers, content)
     
     if context['app_globals'].cache_enabled:
         my_cache = cache.get_cache(
             context['config']['templates.namespace'],
             type=context['config'].get('beaker.cache.type', 'memory'),
             out_of_band=True)
         try:
             headers, content = my_cache.get_value(
                 key=url, createfunc=createfunc, expiretime=60)
             for header, value in headers.iteritems():
                 context['response'].headers[header] = value
             return content
         except NewValueInProgressException:
             context['response'].status = 503
             return rendering_action(*args, **kwargs)
     else:
         return action(self, *args, **kwargs)
Example #27
 def delete(self, id_shortcut, **params):
     
     logger = logging.getLogger(__name__ + '/delete')
     try:
         logger.info('Deleting shortcut [%s]' % id_shortcut)
         
         # the shortcut to be deleted
         sc = dbs.query(SapnsShortcut).get(id_shortcut)
         
         dbs.query(SapnsShortcut).\
             filter(SapnsShortcut.shortcut_id == id_shortcut).\
             delete()
         
         dbs.flush()
          
         _key = '%d_%d' % (sc.user_id, sc.parent_id)
         cache.get_cache('user_get_shortcuts').remove_value(key=_key)
     
         return dict(status=True)
 
     except Exception, e:
         logger.error(e)
         return dict(status=False)
Example #29
	def get(self):
		if config.get('cache_cover_art'):
			cover_path_cache = cache.get_cache('cover_path')
			song = "%s - %s" % (str(self.album.artist),
					str(self.album.name))
			path = cover_path_cache.get_value(key=song,
				createfunc=self._getCover, expiretime=300)

			if path is None:
				cover_path_cache.remove_value(song)
		else:
			path = self._getCover()

		return path
Example #30
 def all_(self):
     #logger = logging.getLogger('Users.all')
     def _all():
         #logger.info('Getting all users...')
         users = []
         for user in dbs.query(SapnsUser).order_by(SapnsUser.user_id):
             users.append(dict(id=user.user_id, display_name=user.display_name, 
                               user_name=user.user_name))
             
         return users
             
     _cache = cache.get_cache('users_all')
                     
     return dict(users=_cache.get_value(key='all', createfunc=_all, expiretime=0))
Example #31
    def forums(self):
        c.forums = meta.Session.query(forum_model.Forum) \
            .order_by(forum_model.Forum.id.asc()) \
            .all()

        # Get some forum stats.  Cache them because they're a bit expensive to
        # compute.  Expire after an hour.
        # XXX when there are admin controls, they'll need to nuke this cache
        # when messing with the forum list
        forum_cache = cache.get_cache('spline-forum', expiretime=3600)
        c.forum_activity = forum_cache.get_value(
            key='forum_activity', createfunc=get_forum_activity)
        c.forum_volume = forum_cache.get_value(
            key='forum_volume', createfunc=get_forum_volume)

        try:
            c.max_volume = max(c.forum_volume.itervalues()) or 1
        except ValueError:
            # Empty database
            c.max_volume = 1

        # Need to know the last post for each forum, in realtime
        c.last_post = {}
        last_post_subq = meta.Session.query(
                forum_model.Forum.id.label('forum_id'),
                func.max(forum_model.Post.posted_time).label('posted_time'),
            ) \
            .outerjoin(forum_model.Thread) \
            .outerjoin(forum_model.Post) \
            .group_by(forum_model.Forum.id) \
            .subquery()
        last_post_q = meta.Session.query(
                forum_model.Post,
                last_post_subq.c.forum_id,
            ) \
            .join((
                last_post_subq,
                forum_model.Post.posted_time == last_post_subq.c.posted_time,
            )) \
            .options(
                joinedload('thread'),
                joinedload('author'),
            )
        for post, forum_id in last_post_q:
            c.last_post[forum_id] = post

        return render('/forum/forums.mako')
Example #33
 def query(self, *args, **kwargs):
     from paste.deploy.converters import asbool
     from pylons.decorators.cache import create_cache_key
     from pylons import config, cache
     cache_enabled = asbool(config.get('openspending.cache_enabled', 'False'))
     # TODO: factor this into its own module
     if not cache_enabled:
         return self._query(*args, **kwargs)
     query_cache = cache.get_cache('cubes_query')
     _id = kwargs.copy()
     _id.update({'args': args})
     _id.update({'collection': self.collection_name})
     def run():
         return self._query(**kwargs)
     key = repr(create_cache_key(self._query, _id))
     return query_cache.get_value(key=key, createfunc=run,
         type="dbm", expiretime=3600)
Example #35
def pylons_demisauce_ws_get(method, resource_id="", format="html", isadmin=False, cachetime=0, **kwargs):
    """
    method
    resource_id (which piece of content)
    """

    def ws_get():
        return demisauce_ws_get(method, resource_id, format=format)

    mycache = cache.get_cache("demisauce.remotecontent")

    if cachetime == 0:
        if "demisauce.cacheduration" in cfg.CFG:
            cachetime = int(cfg.CFG["demisauce.cacheduration"])
    # Get the value, this will create the cache copy the first time
    # and any time it expires (in seconds, so 3600 = one hour)
    myvalue = mycache.get_value("%s-%s-%s" % (method, resource_id, format), createfunc=ws_get, expiretime=cachetime)
    return myvalue
Example #36
    def _get_fbid(self, request):
        session_key = request.params.get('session_key')
        if session_key is None:
            return None

        @fbaccess_noredirect
        def get_fbid():
            facebook.session_key = session_key
            fbid = facebook.users.getLoggedInUser()
            if isinstance(fbid, int):
                return str(fbid)
            return fbid

        sessionc = cache.get_cache('upload.sessions')
        return sessionc.get(session_key,
            expiretime = 120,
            createfunc = get_fbid
        )
Example #37
 def wrapper(func, self, *args, **kwargs):
     c = cache.get_cache('%s.%s' % 
         (func.__module__, func.__name__))
     funcargs = {
         'key': self.id,
         'createfunc': lambda: func(self, *args, **kwargs)
     }
     if type:
         funcargs['type'] = type
     if expiretime:
         funcargs['expiretime'] = expiretime
     val = c.get_value(**funcargs)
     if addsession:
         if hasattr(val, '__iter__'):
             for r in xrange(0, len(val)):
                 val[r] = Session.merge(val[r], dont_load=True)
         else:
             val = Session.merge(val, dont_load=True)
     return val
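
The (func, self, *args, **kwargs) signature and the free variables type, expiretime and addsession mark this as the body of a decorator factory, most likely built with the decorator package. A hypothetical reconstruction of the factory around it (the SQLAlchemy session-merging branch omitted):

from decorator import decorator

def cached_method(cache_type=None, expiretime=None):
    def wrapper(func, self, *args, **kwargs):
        c = cache.get_cache('%s.%s' % (func.__module__, func.__name__))
        funcargs = {'key': self.id,
                    'createfunc': lambda: func(self, *args, **kwargs)}
        if cache_type:
            funcargs['type'] = cache_type
        if expiretime:
            funcargs['expiretime'] = expiretime
        return c.get_value(**funcargs)
    return decorator(wrapper)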
Example #38
    def cron(self):
        """Runs interested cron-jobs."""
        cron_cache = cache.get_cache('spline:cron')

        # XXX Tiny race condition here; checking for a value and then setting
        # it is not atomic
        if 'LOCK' in cron_cache:
            return 'already running'
        cron_cache['LOCK'] = 1
        try:
            now = datetime.datetime.now().time()
            tic = now.hour * 60 + now.minute
            run_hooks('cron', tic=tic)

        finally:
            # Always unlock when done
            del cron_cache['LOCK']

        return 'ok'
Example #39
    def cache_config(self):
        public_display, sysadmin_display, cache_timeout = get_cache_config()

        vars = {
            'public_display': public_display,
            'sysadmin_display': sysadmin_display,
            'cache_timeout': cache_timeout
        }

        data = request.POST
        if 'save' in data:
            pd = data.get('public_display') is not None
            ad = data.get('sysadmin_display') is not None
            ct = data['cache_timeout'] if data.get(
                'cache_timeout') else cache_timeout
            model.Session.query(DsStatsCache).update({
                'public_display': pd,
                'sysadmin_display': ad,
                'cache_timeout': ct
            })
            model.Session.commit()
            our_cache = cache.get_cache('stats', type='memory')
            our_cache.clear()
            h.flash_success(_('Cache config updated'))
            h.redirect_to(controller=ADMINSTATS_CTRL, action='cache_config')

        vars = {
            'public_display': public_display,
            'sysadmin_display': sysadmin_display,
            'cache_timeout': cache_timeout,
            'cache_options':  [
                {'text': 'No caching', 'value': 0},
                {'text': '1 min', 'value': 60},
                {'text': '5 min', 'value': 300},
                {'text': '30 min', 'value': 1800},
                {'text': '1 hour', 'value': 3600},
                {'text': '12 hours', 'value': 43200},
                {'text': '1 day', 'value': 86400},
                {'text': '1 week', 'value': 604800}
            ]
        }
        return render('admin/cache_config.html', extra_vars=vars)
Example #40
def tag_cloud(site_id=0, tag_type=None, link='', cachetime=180):
    """tag cloud"""
    def tag_make():
        from demisauce.model.tag import Tag
        alltags = Tag.by_cloud(site_id=site_id,tag_type=tag_type)
        tag_links = []
        tagct = [t[1] for t in alltags]
        #  max size = 150%, min size = 50%
        # if 100 tags, max = 20, min = 1
        for row in alltags:
            tag_links.append('''<a href="%s%s" id="tag_%s" class="tagged" 
                style="font-size:%s%s">%s</a>''' % (link,row[0],row[0],tag_weight(row[1]),'%',row[0]))
        return '  '.join(tag_links)
    
    mycache = cache.get_cache('demisauce.tagss')
    # Get the value, this will create the cache copy the first time
    # and any time it expires (in seconds, so 3600 = one hour)
    myvalue = mycache.get_value('tag.%s.linkcloud' % (tag_type), 
        createfunc=tag_make,expiretime=cachetime)
    return myvalue
Example #41
    def actionp_update(self, **kw):
        
        logger = logging.getLogger('PrivilegesController.actionp_update')
        try:
            id_action = get_paramw(kw, 'id_action', int)
            granted = get_paramw(kw, 'granted', strtobool)
            
            id_role = get_paramw(kw, 'id_role', int, opcional=True)
            #id_user = get_paramw(kw, 'id_user', int, opcional=True)
            #if id_role:
            who = dbs.query(SapnsRole).get(id_role)
                
#            else:
#                who = dbs.query(SapnsUser).get(id_user)
                
            action = dbs.query(SapnsPermission).get(id_action)
                
            if granted:
                logger.info('Creating action privilege')
                who.permissions_.append(action)
                
            else:
                logger.info('Deleting action privilege')
                who.permissions_.remove(action)
                
            dbs.flush()
            
            # reset cache
            _cache = cache.get_cache(SapnsPermission.CACHE_ID)
            for user in who.users_:
                _cache.remove_value(key='%d_%d' % (user.user_id, action.class_id))

            return dict(status=True)
            
        except Exception, e:
            logger.error(e)
            return dict(status=False, message=str(e).decode('utf-8'))
Example #42
def tag_links(site_id=0, tag_type=None, tags=tags, cachetime=180):
    """
    Converts a list of tags to a list of links
    :tag_type: the type since tags can refer to many things
    
    """
    selected_tags = tags
    def tag_make():
        from demisauce.model.tag import Tag
        alltags = Tag.by_key(site_id=site_id,tag_type=tag_type)
        tag_links = []
        for tag in alltags:
            if tag in selected_tags:
                tag_links.append('''<a href="#" id="tag_%s" class="tagged">%s</a>''' % (tag.replace(':',''),tag))
            else:
                tag_links.append("<a href=\"#\" id=\"tag_%s\">%s</a>" % (tag.replace(':',''),tag))
        return '  '.join(tag_links)
    
    mycache = cache.get_cache('demisauce.tags')
    # Get the value, this will create the cache copy the first time
    # and any time it expires (in seconds, so 3600 = one hour)
    myvalue = mycache.get_value('tag.%s.linklist' % (tag_type), 
        createfunc=tag_make,expiretime=cachetime)
    return myvalue
Example #43
import datetime

from pylons import config
from sqlalchemy import *
from paste.deploy.converters import asbool

from ckan import model

# Use a private config option to enable the cache, rather than ckan.cache_enabled,
# because we want it to default to on: computing the stats is an intensive operation.
cache_enabled = asbool(config.get('ckan.stats_cache_enabled', 'True'))
if cache_enabled:
    from pylons import cache
    our_cache = cache.get_cache('stats', type='dbm')

DATE_FORMAT = '%Y-%m-%d'

def table(name):
    return Table(name, model.metadata, autoload=True)

def datetime2date(datetime_):
    return datetime.date(datetime_.year, datetime_.month, datetime_.day)


class Stats(object):
    @classmethod
    def top_rated_packages(cls, limit=10):
        # NB Not using sqlalchemy as sqla 0.4 doesn't work using both group_by
        # and apply_avg
        package = table('package')
        rating = table('rating')
Example #44
import datetime
import re
import Queue

from pylons import config, cache
from sqlalchemy import Table
from sqlalchemy.sql.expression import text

import ckan.plugins as p
import ckan.logic as logic
import ckan.lib.helpers as h
import ckan.model as model

cache_enabled = p.toolkit.asbool(
    config.get('ckanext.stats.cache_enabled', 'True'))

if cache_enabled:
    cache_default_timeout = p.toolkit.asint(
        config.get('ckanext.stats.cache_default_timeout', '86400'))
    cache_fast_timeout = p.toolkit.asint(
        config.get('ckanext.stats.cache_fast_timeout', '600'))
    our_cache = cache.get_cache('stats', type='memory')

DATE_FORMAT = '%Y-%m-%d'
TODAY = datetime.date.today()


def table(name):
    return Table(name, model.meta.metadata, autoload=True)


def datetime2date(datetime_):
    return datetime.date(datetime_.year, datetime_.month, datetime_.day)


class Stats(object):
    @classmethod
    def index(self):
        c = p.toolkit.c
        our_cache = cache.get_cache('stats', type='memory')
        public_display, sysadmin_display, cache_timeout = get_cache_config()
        get_stats_display = public_display or (
            sysadmin_display and h.check_access('sysadmin'))
        if get_stats_display:
            stats = stats_lib.Stats()
            stats.init(our_cache, cache_timeout)
            rev_stats = stats_lib.RevisionStats()
            rev_stats.init(our_cache, cache_timeout)
            c.top_rated_packages = stats.top_rated_packages()
            c.most_edited_packages = stats.most_edited_packages()
            c.largest_groups = stats.largest_groups()
            c.top_package_owners = stats.top_package_owners()
            c.summary_stats = stats.summary_stats()
            c.activity_counts = stats.activity_counts()
            c.by_org = stats.by_org()
            c.res_by_org = stats.res_by_org()
            c.top_active_orgs = stats.top_active_orgs()
            c.user_access_list = stats.user_access_list()
            c.recent_datasets = stats.recent_datasets()
            c.new_packages_by_week = rev_stats.get_by_week('new_packages')
            c.num_packages_by_week = rev_stats.get_num_packages_by_week()
            c.package_revisions_by_week = rev_stats.get_by_week(
                'package_revisions')

            # Used in the legacy CKAN templates.
            c.packages_by_week = []

            # Used in new CKAN templates; gives the templates more control over formatting.
            c.raw_packages_by_week = []

            for week_date, num_packages, cumulative_num_packages in c.num_packages_by_week:
                c.packages_by_week.append(
                    '[new Date(%s), %s]' % (week_date.replace('-', ','),
                                            cumulative_num_packages))
                c.raw_packages_by_week.append({
                    'date': h.date_str_to_datetime(week_date),
                    'total_packages': cumulative_num_packages})

            c.all_package_revisions = []
            c.raw_all_package_revisions = []
            week_queue = Queue.Queue()
            for week_date, revs, num_revisions, cumulative_num_revisions in c.package_revisions_by_week:
                c.all_package_revisions.append(
                    '[new Date(%s), %s]' % (week_date.replace('-', ','),
                                            num_revisions))
                c.raw_all_package_revisions.append({
                    'date': h.date_str_to_datetime(week_date),
                    'total_revisions': num_revisions})
                week_queue.put(week_date)

            c.new_datasets = []
            c.raw_new_datasets = []
            for week_date, pkgs, num_packages, cumulative_num_packages in c.new_packages_by_week:
                revision_week_date = week_queue.get()
                while revision_week_date != week_date:
                    c.new_datasets.append(
                        '[new Date(%s), %s]' % (
                            revision_week_date.replace('-', ','), 0))
                    c.raw_new_datasets.append({
                        'date': h.date_str_to_datetime(revision_week_date),
                        'new_packages': 0})
                    revision_week_date = week_queue.get()

                c.new_datasets.append(
                    '[new Date(%s), %s]' % (week_date.replace('-', ','),
                                            num_packages))
                c.raw_new_datasets.append({
                    'date': h.date_str_to_datetime(week_date),
                    'new_packages': num_packages})

            while not week_queue.empty():
                revision_week_date = week_queue.get()
                c.new_datasets.append(
                    '[new Date(%s), %s]' % (
                        revision_week_date.replace('-', ','), 0))
                c.raw_new_datasets.append({
                    'date': h.date_str_to_datetime(revision_week_date),
                    'new_packages': 0})

            return p.toolkit.render('stats/index.html')
        else:
            abort(403, _('Not authorized to see this page'))
Example #46
import datetime

from pylons import config
from sqlalchemy import Table, select, join, func, and_

import ckan.plugins as p
import ckan.model as model

cache_enabled = p.toolkit.asbool(config.get('ckanext.stats.cache_enabled', 'True'))

if cache_enabled:
    from pylons import cache
    our_cache = cache.get_cache('stats', type='dbm')

DATE_FORMAT = '%Y-%m-%d'

def table(name):
    return Table(name, model.meta.metadata, autoload=True)

def datetime2date(datetime_):
    return datetime.date(datetime_.year, datetime_.month, datetime_.day)


class Stats(object):
    @classmethod
    def top_rated_packages(cls, limit=10):
        # NB Not using sqlalchemy as sqla 0.4 doesn't work using both group_by
        # and apply_avg
        package = table('package')
        rating = table('rating')
        sql = select([package.c.id, func.avg(rating.c.rating), func.count(rating.c.rating)], from_obj=[package.join(rating)]).\
Example #49
 def __init__(self, dataset, type='dbm'):
     self.dataset = dataset
     self.cache_enabled = app_globals.cache_enabled and \
         not self.dataset.private
     self.cache = cache.get_cache('DSCACHE_' + dataset.name, type=type)
Example #51
 def init(cls, our_cache=None, cache_timeout=86400):
     if our_cache is None:
         cls._our_cache = cache.get_cache('stats', type='memory')
     else:
         cls._our_cache = our_cache
     cls._cache_timeout = cache_timeout
Example #52
log = logging.getLogger(__name__)

from networkpinger import model

from networkpinger.model import forms

from webhelpers.feedgenerator import Atom1Feed

from pylons import cache
from pylons.decorators.cache import beaker_cache

from repoze.what.predicates import has_permission
from repoze.what.plugins.pylonshq import ActionProtector

mycache = cache.get_cache('alerts', type='memory', expiretime=300)


def get_down():
    f = model.Alert.query_down().all
    return mycache.get_value(key='down', createfunc=f)


def get_up():
    f = model.Alert.query_recent_up
    return mycache.get_value(key='up', createfunc=f)


def get_all_up():
    return mycache.get_value(key="all_up",
                             createfunc=model.Host.get_up_addresses)
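
Because expiretime is given to get_cache here, it becomes the namespace default, so each get_value above inherits the 300-second expiry. Note also that query_down().all and query_recent_up are passed uncalled: get_value only invokes its createfunc on a cache miss. A sketch of the default-and-override behaviour:

mycache = cache.get_cache('alerts', type='memory', expiretime=300)

down = mycache.get_value(key='down', createfunc=list)              # inherits 300s
up = mycache.get_value(key='up', createfunc=list, expiretime=60)   # per-call override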