def move(request):
    'Move a user to a different access category'
    params = request.params
    if params.get('token') != request.session.get_csrf_token():
        return dict(isOk=0, message='Invalid session token')
    userID = authenticated_userid(request)
    # Load targetUser
    targetUserID = params.get('targetUserID', 0)
    targetUser = db.query(User).get(targetUserID)
    if not targetUser:
        return dict(isOk=0, message='Could not find targetUserID=%s' % targetUserID)
    if int(userID) == int(targetUserID):
        return dict(isOk=0, message='Cannot promote or demote yourself')
    # Load attributes
    hasAttributes = False
    for attributeName in 'is_active', 'is_super':
        value = params.get(attributeName)
        if value is None:
            continue
        try:
            value = bool(int(value))
        except ValueError:
            return dict(isOk=0, message='Could not parse %s=%s' % (attributeName, value))
        setattr(targetUser, attributeName, value)
        hasAttributes = True
    if not hasAttributes:
        return dict(isOk=0, message='No attributes specified')
    # Return
    region_invalidate(get_properties, None, targetUserID)
    return dict(isOk=1)

def atom(self, repo_name):
    """Produce an atom-1.0 feed via feedgenerator module"""
    @cache_region('long_term')
    def _get_feed_from_cache(key, kind):
        feed = Atom1Feed(
            title=self.title % repo_name,
            link=url('summary_home', repo_name=repo_name, qualified=True),
            description=self.description % repo_name,
            language=self.language,
            ttl=self.ttl
        )
        for cs in reversed(list(c.rhodecode_repo[-self.feed_nr:])):
            feed.add_item(
                title=self._get_title(cs),
                link=url('changeset_home', repo_name=repo_name,
                         revision=cs.raw_id, qualified=True),
                author_name=cs.author,
                description=''.join(self.__get_desc(cs)),
                pubdate=cs.date,
            )
        response.content_type = feed.mime_type
        return feed.writeString('utf-8')

    kind = 'ATOM'
    valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
    if not valid:
        region_invalidate(_get_feed_from_cache, None, repo_name, kind)
    return _get_feed_from_cache(repo_name, kind)

def rss(self, repo_name):
    """Produce an rss2 feed via feedgenerator module"""
    @cache_region('long_term')
    def _get_feed_from_cache(key):
        feed = Rss201rev2Feed(
            title=self.title % repo_name,
            link=url('summary_home', repo_name=repo_name, qualified=True),
            description=self.description % repo_name,
            language=self.language,
            ttl=self.ttl
        )
        for cs in reversed(list(c.rhodecode_repo[-self.feed_nr:])):
            feed.add_item(
                title=self._get_title(cs),
                link=url('changeset_home', repo_name=repo_name,
                         revision=cs.raw_id, qualified=True),
                author_name=cs.author,
                description=''.join(self.__get_desc(cs)),
                pubdate=cs.date,
            )
        response.content_type = feed.mime_type
        return feed.writeString('utf-8')

    key = repo_name + '_RSS'
    inv = CacheInvalidation.invalidate(key)
    if inv is not None:
        region_invalidate(_get_feed_from_cache, None, key)
        CacheInvalidation.set_valid(inv.cache_key)
    return _get_feed_from_cache(key)

def atom(self, repo_name):
    """Produce an atom-1.0 feed via feedgenerator module"""
    @cache_region('long_term', '_get_feed_from_cache')
    def _get_feed_from_cache(key, kind):
        feed = Atom1Feed(
            title=_('%s %s feed') % (c.site_name, repo_name),
            link=h.canonical_url('summary_home', repo_name=repo_name),
            description=_('Changes on %s repository') % repo_name,
            language=language,
            ttl=ttl
        )
        rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
        for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
            feed.add_item(
                title=self._get_title(cs),
                link=h.canonical_url('changeset_home', repo_name=repo_name,
                                     revision=cs.raw_id),
                author_name=cs.author,
                description=''.join(self.__get_desc(cs)),
                pubdate=cs.date,
            )
        response.content_type = feed.mime_type
        return feed.writeString('utf-8')

    kind = 'ATOM'
    valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
    if not valid:
        region_invalidate(_get_feed_from_cache, None,
                          '_get_feed_from_cache', repo_name, kind)
    return _get_feed_from_cache(repo_name, kind)

def imputarActivitat(self, data, hores, minuts, proces, activitatId,
                     fname='imputarActivitat'):
    """Log time against an activity through the imputacions web form."""
    self.log("imputarActivitat")
    #self.reloadExternalLoginKey()
    parts = ['dataImputacio=%s' % (data),
             'horesImputades=%s' % (hores),
             'minutsImputats=%s' % (minuts),
             'proces=%s' % (proces),
             'activitatId=%s' % (activitatId),
             'cas=ACTIVITAT',
             'partyId=%s' % (self.tecnicID),
             'externalLoginKey=%s' % (self.externalLoginKey),
             ]
    url = 'https://maul.upc.es:8444/imputacions/control/imputacioAltaGraella?' + '&'.join(parts)
    response = self.br.open(url)
    html = response.read()
    if self.checkBrowserExpired(html):
        return 'EXPIRED'
    exitcode = eval(html)
    exitcode['hores'] = hores
    exitcode['minuts'] = minuts.rjust(2, '0')
    # Invalidate the cache
    # getUtility(IRAMCache).invalidate('obtenirImputacions')
    day1, day2 = self.epitool.getObtenirImputacionsDays(self.request, self.username)
    region_invalidate('epi.operacions.obtenirImputacions', 'default_term',
                      'obtenirImputacions', 'epi.operacions.Operacions',
                      self.username, day1, day2)
    return exitcode

def __get_readme_data(self, repo_name, repo):
    @cache_region('long_term')
    def _get_readme_from_cache(key):
        readme_data = None
        readme_file = None
        log.debug('Fetching readme file')
        try:
            cs = repo.get_changeset()  # fetches TIP
            renderer = MarkupRenderer()
            for f in README_FILES:
                try:
                    readme = cs.get_node(f)
                    readme_file = f
                    readme_data = renderer.render(readme.content, f)
                    log.debug('Found readme %s' % readme_file)
                    break
                except NodeDoesNotExistError:
                    continue
        except ChangesetError:
            log.error(traceback.format_exc())
        except EmptyRepositoryError:
            pass
        except Exception:
            log.error(traceback.format_exc())
        return readme_data, readme_file

    key = repo_name + '_README'
    inv = CacheInvalidation.invalidate(key)
    if inv is not None:
        region_invalidate(_get_readme_from_cache, None, key)
        CacheInvalidation.set_valid(inv.cache_key)
    return _get_readme_from_cache(key)

def canviarImputacio(self, novadata, hores, minuts, iid, fname='canviarImputacio'):
    """Move an existing time entry to a different date."""
    self.log("canviarImputacio")
    if iid != '':
        code = iid
        self.reloadExternalLoginKey()
        parts = ['timeEntryId=%s' % (iid),
                 'dataImputacio=%s' % (novadata),
                 'horesImputades=%s' % (hores),
                 'minutsImputats=%s' % (minuts),
                 'externalLoginKey=%s' % (self.externalLoginKey),
                 ]
        url = 'https://maul.upc.es:8444/imputacions/control/editarImputacio?' + '&'.join(parts)
        response = self.br.open(url)
        html = response.read()
        if self.checkBrowserExpired(html):
            return 'EXPIRED'
        # Invalidate the cache
        # getUtility(IRAMCache).invalidate('obtenirImputacions')
        day1, day2 = self.epitool.getObtenirImputacionsDays(self.request, self.username)
        region_invalidate('epi.operacions.obtenirImputacions', 'default_term',
                          'obtenirImputacions', 'epi.operacions.Operacions',
                          self.username, day1, day2)
        self.saveSessionData()
    else:
        # "Could not log time on day %s. Refresh the EPI and move the entry
        # manually by dragging it to day %s."
        code = "No sha pogut imputar al dia %s. Refresca lepi i mou la imputacio manualment arrossegant-la al dia %s" % (novadata, novadata)
    return code.encode('utf-8')

def getCodiImputacio(self, data, minuts, ref, tipus):
    """
    Look up a time entry filtered by entry type, minutes logged, date and
    reference. Among all the results, keep the one with the highest iid,
    which will be the most recently logged.
    """
    self.log("getCodiImputacio")
    # Invalidate the cache
    # getUtility(IRAMCache).invalidate('obtenirImputacions')
    day1, day2 = self.epitool.getObtenirImputacionsDays(self.request, self.username)
    region_invalidate('epi.operacions.obtenirImputacions', 'default_term',
                      'obtenirImputacions', 'epi.operacions.Operacions',
                      self.username, day1, day2)
    imputacions = self.obtenirImputacions(self.username, data, data)
    tt = tuple(data.split('-'))
    imputacio = None
    newest = True
    for imp in imputacions:
        if imputacio is not None:
            newest = imp['iid'] > imputacio['iid']
        if (imp['date'] == tt and ref in imp['referencia']
                and HMaMinuts(imp['amount']) == minuts and newest
                and imp['type'] == tipus):
            imputacio = imp
    if imputacio:
        return imputacio['iid']
    else:
        return ''

def modificarImputacio(self, hores, minuts, iid, comentari='', fname='modificarImputacio'):
    """Edit the hours, minutes and comment of an existing time entry."""
    self.log("modificarImputacio")
    self.reloadExternalLoginKey()
    parts = ['timeEntryId=%s' % (iid),
             'horesImputades=%s' % (hores),
             'minutsImputats=%s' % (minuts),
             'externalLoginKey=%s' % (self.externalLoginKey),
             ]
    if comentari:
        parts.append('editComentari=%s' % quote(comentari))
    url = 'https://maul.upc.es:8444/imputacions/control/imputacioEdicioGraella?' + '&'.join(parts)
    response = self.br.open(url)
    html = response.read()
    if self.checkBrowserExpired(html):
        return 'EXPIRED'
    exitcode = eval(html)
    exitcode['hores'] = str(int(hores))
    exitcode['minuts'] = minuts.rjust(2, '0')
    # Invalidate the cache
    # getUtility(IRAMCache).invalidate('obtenirImputacions')
    day1, day2 = self.epitool.getObtenirImputacionsDays(self.request, self.username)
    region_invalidate('epi.operacions.obtenirImputacions', 'default_term',
                      'obtenirImputacions', 'epi.operacions.Operacions',
                      self.username, day1, day2)
    self.saveSessionData()
    return exitcode

def rss(self, repo_name):
    """Produce an rss2 feed via feedgenerator module"""
    @cache_region('long_term')
    def _get_feed_from_cache(key, kind):
        feed = Rss201rev2Feed(
            title=self.title % repo_name,
            link=h.canonical_url('summary_home', repo_name=repo_name),
            description=self.description % repo_name,
            language=self.language,
            ttl=self.ttl
        )
        for cs in reversed(list(c.db_repo_scm_instance[-self.feed_nr:])):
            feed.add_item(
                title=self._get_title(cs),
                link=h.canonical_url('changeset_home', repo_name=repo_name,
                                     revision=cs.raw_id),
                author_name=cs.author,
                description=''.join(self.__get_desc(cs)),
                pubdate=cs.date,
            )
        response.content_type = feed.mime_type
        return feed.writeString('utf-8')

    kind = 'RSS'
    valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
    if not valid:
        region_invalidate(_get_feed_from_cache, None, repo_name, kind)
    return _get_feed_from_cache(repo_name, kind)

def get_member_cached(self, id, cache_key, close_sess=True,
                      cache_type='default_term', invalidate=False):
    """
    Get member cached function

    :param id: Object instance ID for cache
    :param close_sess: Session close after execution
    :param cache_key: Extra namespace key for the cached entry
    :param cache_type: Cache region, which controls expire time
        short_term: 60 seconds
        default_term: 300 seconds
        long_term: 3600 seconds
    :param invalidate: Invalidate this cache
    """
    @cache_region(cache_type, cache_key)
    def _get_member_cached(id, close_sess):
        """Execute when there's no cache"""
        log.debug("Creating cache for region %s and key %s",
                  cache_type, cache_key)
        return self.get_member(id, close_sess)

    if invalidate:
        # Invalidate after the cached function exists (the original called
        # region_invalidate before the def, a NameError); the decorator's
        # extra key must be repeated so the lookup key matches.
        region_invalidate(_get_member_cached, None, cache_key, id, close_sess)

    self.single_member = True
    return _get_member_cached(id, close_sess)

def del_category(self):
    if not request.is_xhr:
        return abort(404)
    region_invalidate(CategoryController().tree, 'tree_cache', 'tree')
    category = dict(request.POST)
    CategoryItem(category).delete()

def js_localization(self):
    "return localized strings from cache or compute"
    locale = get_lang()[0]
    response.headers['Pragma'] = 'public'
    response.headers['Cache-Control'] = 'max-age=0'
    response.headers['Content-Type'] = 'text/javascript;charset=utf-8'
    if self.langchange:
        region_invalidate(self._js_localization, None, 'baruwajs', locale)
    return self._js_localization(locale)

def save_category(self):
    if not request.is_xhr:
        return abort(404)
    region_invalidate(CategoryController().tree, 'tree_cache', 'tree')
    category = dict(request.POST)
    category['is_leaf'] = 1 if 'is_leaf' in category else 0
    category['id'] = int(category['id'])
    category['parent_id'] = int(category['parent_id']) if category['id'] != 0 else ''
    CategoryItem(category).save()

def newretweeted():
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    twitter_user = TwitterUser.all().fetch(limit=10)
    random.shuffle(twitter_user)
    queue_cnt = 0
    for usr in twitter_user:
        auth.set_access_token(usr.twitter_access_token,
                              usr.twitter_access_token_secret)
        api = tweepy.API(auth)
        rts = api.user_timeline(
            user_id=usr.twitter_id,
            since_id=usr.last_retweeted_id if usr.last_retweeted_id != 0 else None,
            include_rts=1, exclude_replies=1, count=30)
        rts = [t for t in rts if hasattr(t, "retweeted_status")]
        if len(rts) > 0:
            usr.last_retweeted_id = rts[0].id
            usr.put()
            for t in reversed(rts):
                tweet_text = u"RT @{0}: {1}".format(
                    t.retweeted_status.user.screen_name,
                    get_tweet_urls_text(t.retweeted_status))
                msg_text = htmlentitydecode(u"via {0}: {1}".format(
                    t.user.screen_name, tweet_text))
                dt = SavedTweets(tweet_id=t.id, user=usr.user,
                                 pushed_flag=False, tweet_text=msg_text)
                dt.put()
                taskqueue.add(url='/tasks/push_retweet',
                              name='push_{0}'.format(t.id),
                              countdown=queue_cnt * 120,
                              params=dict(dbkey=dt.key()))
                queue_cnt += 1
            region_invalidate(retrive_tweet_data, "short_term",
                              usr.user, *default_range())
            info = "scheduled {0} tweet(s) from {1}".format(
                queue_cnt, usr.twitter_id)
        else:
            info = "no new retweet from {0}".format(usr.twitter_id)
        logging.info(info)

def index_(request):
    'Add a post'
    # Load
    text = request.params.get('text', '').strip()
    if text:
        # Add
        db.add(Post(text))
        transaction.commit()
        region_invalidate(get_posts, None)
    # Return
    return index(request)

def scm_instance_cached(self):
    @cache_region('long_term')
    def _c(repo_name):
        return self.__get_instance()

    rn = self.repo_name
    log.debug('Getting cached instance of repo')
    inv = self.invalidate
    if inv is not None:
        region_invalidate(_c, None, rn)
        # update our cache
        CacheInvalidation.set_valid(inv.cache_key)
    return _c(rn)

def test_resource_generation_existing(self):
    from pyramid_beaker import set_cache_regions_from_settings
    from beaker.cache import region_invalidate
    request = testing.DummyRequest()
    request.db = DummyMongoDB_2()
    settings = self.required_settings()
    set_cache_regions_from_settings(settings)  # setting cache_regions
    resource_gen = ResourceGenerator(request, generation_tool=DummyBase28)
    url = 'http://www.scielo.br'
    region_invalidate(resource_gen.generate, None, url)  # invalidate cache
    self.assertEqual(resource_gen.generate(url), '4kgxx')

def Marcar(self):
    """Toggle the current clock-in state."""
    self.log("Marcar")
    persones = self.br.open(FITCHA_URL)
    persones_html = persones.read()
    if self.checkBrowserExpired(persones_html):
        return 'EXPIRED'
    persones.close()
    # getUtility(IRAMCache).invalidate('getMarcatges')
    region_invalidate('epi.presencia.getMarcatges', 'long_term',
                      'getMarcatges', 'epi.presencia.Presencia', self.username)
    # getUtility(IRAMCache).invalidate('getPresencia')
    region_invalidate('epi.presencia.getPermisos', 'default_term',
                      'getPermisos', 'epi.presencia.Presencia', self.username)
    return True

def mutate(request):
    'Mutate user token'
    params = request.params
    if params.get('token') != request.session.get_csrf_token():
        return dict(isOk=0, message='Invalid session token')
    userID = authenticated_userid(request)
    # Mutate user code
    user = db.query(User).get(userID)
    user.code = make_random_string(CODE_LEN)
    # Refresh cookie
    if not hasattr(request, 'response_headerlist'):
        request.response_headerlist = []
    request.response_headerlist.extend(
        remember(request, user.id, tokens=format_tokens(user)))
    # Return
    region_invalidate(get_properties, None, userID)
    return dict(isOk=1, code=user.code)

def imputarTiquet(self, data, hores, minuts, tiquetId, fname='imputarTiquet'):
    """Log time against a ticket using the operations manager form."""
    self.log("imputarTiquet")
    today = '-'.join(DateTimeToTT(DateTime()))
    self.reloadExternalLoginKey()
    parts = ['requirementId=%s' % (tiquetId),
             'externalLoginKey=%s' % (self.externalLoginKey),
             ]
    url = 'https://maul.upc.es:8444/tiquets/control/tiquetDetallAssignacioHistoria?' + '&'.join(parts)
    self.br.open(url)
    try:
        self.br.select_form(name='afegirImputacio')
    except FormNotFoundError:
        # "Could not log time on a closed ticket."
        return dict(hores='', minuts='', confirm='error',
                    code="No s'ha pogut imputar en un tiquet tancat.")
    except:
        return 'EXPIRED'
    self.br.form.action = 'https://maul.upc.es:8444/tiquets/control/imputarTempsTasca'
    self.br.form.find_control('minutsImputats').readonly = False
    minutsImputats = int(hores) * 60 + int(minuts)
    self.br['minutsImputats'] = str(minutsImputats)
    self.br['horesImputadesHelper'] = hores
    self.br['minutsImputatsHelper'] = minuts
    addtiquet_response = self.br.submit()
    #html = addtiquet_response.read()
    # Look up the code of the new time entry
    iid = self.getCodiImputacio(today, minutsImputats, tiquetId, 'TI')
    code = iid.encode('utf-8')
    if data != today:
        code = self.canviarImputacio(data, hores, minuts, iid)
    # Invalidate the cache
    # getUtility(IRAMCache).invalidate('obtenirImputacions')
    day1, day2 = self.epitool.getObtenirImputacionsDays(self.request, self.username)
    region_invalidate('epi.operacions.obtenirImputacions', 'default_term',
                      'obtenirImputacions', 'epi.operacions.Operacions',
                      self.username, day1, day2)
    self.saveSessionData()
    return dict(hores=hores, minuts=minuts.rjust(2, '0'),
                confirm=iid == '' and 'error' or 'ok', code=code)

def save_branch(self, j_branch):
    """ Create/update a supplier branch """
    global supplier_id_key
    supplier_id_key = "supplier_id"
    if not isinstance(j_branch, dict):
        import json
        j_branch = json.loads(j_branch)
    branch_id = j_branch.pop("supplier_branch_id", None)
    print j_branch[supplier_id_key]
    supplier_id = j_branch.pop(supplier_id_key, None)
    if not supplier_id:
        raise TypeError("No Supplier Set")

    def _setattr(branch):
        branch.id = branch_id
        if branch.id is None:
            raise TypeError("Branch id not set")
        branch.street_address = j_branch.pop("supplier_branch_street_address", None)
        branch.tel_1 = j_branch.pop("supplier_branch_tel_1", None)
        branch.tel_2 = j_branch.pop("supplier_branch_tel_2", None)
        branch.email_address = j_branch.pop("supplier_branch_email", None)
        branch.website = j_branch.pop("supplier_branch_website", None)
        return branch

    def _create():
        supplier = DBSession.query(Supplier).get(supplier_id)
        # Check for None before touching the object (the original printed
        # supplier.to_dict first, which would raise AttributeError on None)
        if supplier is None:
            raise TypeError("Could not find supplier with id: %s" % supplier_id)
        print supplier.to_dict
        branch = SupplierBranch(supplier_id)
        branch = _setattr(branch)
        del branch.id
        with transaction.manager:
            DBSession.add(branch)

    def _update():
        branch = DBSession.query(SupplierBranch).get(branch_id)
        branch = _setattr(branch)
        with transaction.manager:
            DBSession.merge(branch)

    _create() if branch_id == -1 else _update()
    region_invalidate(self._all, "hour", "suppliers")

def save_branch(self, j_branch):
    """ Create/update a supplier branch """
    global supplier_id_key
    supplier_id_key = "supplier_id"
    if not isinstance(j_branch, dict):
        import json
        j_branch = json.loads(j_branch)
    branch_id = j_branch.pop("supplier_branch_id", None)
    print(j_branch[supplier_id_key])
    supplier_id = j_branch.pop(supplier_id_key, None)
    if not supplier_id:
        raise TypeError("No Supplier Set")

    def _setattr(branch):
        branch.id = branch_id
        if branch.id is None:
            raise TypeError("Branch id not set")
        branch.street_address = \
            j_branch.pop("supplier_branch_street_address", None)
        branch.tel_1 = j_branch.pop("supplier_branch_tel_1", None)
        branch.tel_2 = j_branch.pop("supplier_branch_tel_2", None)
        branch.email_address = j_branch.pop("supplier_branch_email", None)
        branch.website = j_branch.pop("supplier_branch_website", None)
        return branch

    def _create():
        supplier = DBSession.query(Supplier).get(supplier_id)
        # Check for None before touching the object (the original printed
        # supplier.to_json first, which would raise AttributeError on None)
        if supplier is None:
            raise TypeError("Could not find supplier with id: %s" % supplier_id)
        print(supplier.to_json)
        branch = SupplierBranch(supplier_id)
        branch = _setattr(branch)
        del branch.id
        with transaction.manager:
            DBSession.add(branch)

    def _update():
        branch = DBSession.query(SupplierBranch).get(branch_id)
        branch = _setattr(branch)
        with transaction.manager:
            DBSession.merge(branch)

    _create() if branch_id == -1 else _update()
    region_invalidate(self._all, "hour", "suppliers")

def get_collection_cached(self, query, cache_key, cache_type='default_term',
                          invalidate=False):
    """
    Get cached results collection

    :param query: Search query
    :param cache_key: Extra namespace key for the cached entry
    :param cache_type: Cache region, which controls expire time
        short_term: 60 seconds
        default_term: 300 seconds
        long_term: 3600 seconds
    :param invalidate: invalidate cache or not
    :return: Collection JSON
    """
    @cache_region(cache_type, cache_key)
    def _get_collection_cached(query):
        """
        Return cached collection

        :param query: Query to be executed against function mode
        :return: result
        """
        response = {
            'results': self.get_collection(query),
            'limit': self.default_limit,
            'offset': self.default_offset,
            'total_count': self.total_count
        }
        return response

    if invalidate:
        # Invalidate after the cached function exists (the original called
        # region_invalidate before the def, a NameError); repeat the
        # decorator's extra key so the lookup key matches.
        region_invalidate(_get_collection_cached, None, cache_key, query)

    response = _get_collection_cached(query)

    # NOTE: Fix parameters! By John Doe
    self.default_limit = response['limit']
    self.default_offset = response['offset']
    self.total_count = response['total_count']

    # NOTE: Return results! By John Doe
    return response['results']

def test_check_invalidate_region():
    func = make_region_cached_func()
    result = func('Fred')
    assert 'Fred' in result

    result2 = func('Fred')
    assert result == result2

    region_invalidate(func, None, 'region_loader', 'Fred')
    result3 = func('Fred')
    assert result3 != result2

    result2 = func('Fred')
    assert result3 == result2

    # Invalidate a non-existent key
    region_invalidate(func, None, 'region_loader', 'Fredd')
    assert result3 == result2

def test_check_invalidate_region_2():
    func = make_region_cached_func_2()
    result = func("Fred")
    assert "Fred" in result

    result2 = func("Fred")
    assert result == result2

    region_invalidate(func, None, "Fred")
    result3 = func("Fred")
    assert result3 != result2

    result2 = func("Fred")
    assert result3 == result2

    # Invalidate a non-existent key
    region_invalidate(func, None, "Fredd")
    assert result3 == result2

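The two tests above differ in the arguments passed to region_invalidate: the first repeats an extra namespace string ('region_loader') that its cached function was presumably declared with, while the second passes only the call argument. A minimal self-contained sketch of that rule (the region configuration and the load_user function here are illustrative assumptions, not taken from the tests):

from beaker.cache import CacheManager, cache_region, region_invalidate
from beaker.util import parse_cache_config_options

# Register a 'short_term' region; CacheManager updates the module-level
# cache_regions registry that @cache_region reads from.
cache = CacheManager(**parse_cache_config_options({
    'cache.regions': 'short_term',
    'cache.short_term.type': 'memory',
    'cache.short_term.expire': 60,
}))

@cache_region('short_term', 'users')  # 'users' becomes part of the cache key
def load_user(name):
    return 'loaded %s' % name  # body runs only on a cache miss

load_user('Fred')  # cached under the key built from ('users', 'Fred')
# To invalidate, repeat every key component: the decorator's extra argument
# first, then the call arguments. Region None falls back to the decorator's.
region_invalidate(load_user, None, 'users', 'Fred')
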
def scm_instance_cached(self):
    @cache_region('long_term')
    def _c(repo_name):
        return self.__get_instance()

    # TODO: remove this trick when beaker 1.6 is released
    # and have fixed this issue with not supporting unicode keys
    rn = safe_str(self.repo_name)

    inv = self.invalidate
    if inv is not None:
        region_invalidate(_c, None, rn)
        # update our cache
        inv.cache_active = True
        Session.add(inv)
        Session.commit()
    return _c(rn)

def __acl__(self):
    # type: () -> AccessControlListType
    """
    Access Control List (:term:`ACL`) formed of :term:`ACE` defining combination rules to grant or refuse access.

    Each :term:`ACE` is defined as ``(outcome, user/group, permission)`` tuples.
    Called by the configured Pyramid :class:`pyramid.authorization.ACLAuthorizationPolicy`.

    Caching is automatically handled according to configured application settings and whether
    the specific ACL combination being requested was already processed recently.
    """
    if "acl" not in cache_regions:
        cache_regions["acl"] = {"enabled": False}
    user_id = None if self.request.user is None else self.request.user.id
    cache_keys = (self.request.method, self.request.path_qs, user_id)
    if self.request.headers.get("Cache-Control") == "no-cache":
        region_invalidate(self._get_acl_cached, "acl", *cache_keys)
    return self._get_acl_cached(*cache_keys)

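The __acl__ method above seeds Beaker's module-level cache_regions registry on the fly, registering a disabled "acl" region when the application configured none. The same registry can declare a working region up front; a small sketch (the expire and type values are assumptions for illustration):

from beaker.cache import cache_regions

cache_regions.update({
    'acl': {
        'enabled': True,   # with False, @cache_region bypasses the cache, as above
        'type': 'memory',
        'expire': 60,
    },
})
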
def generate(self, url):
    pre_fetch = self._request.db['urls'].find_one({'plain': url})
    if pre_fetch is not None:
        return pre_fetch['short_ref']
    attempts = 0
    while attempts < 10:
        weak_short_reference = self._generation_tool.genbase(self._digit_count)
        url_data = {'plain': url, 'short_ref': weak_short_reference}
        try:
            self._request.db['urls'].insert(url_data, safe=True)
        except pymongo.errors.DuplicateKeyError:
            attempts += 1
            continue
        else:
            region_invalidate(self.fetch, None, url_data['short_ref'])
            return url_data['short_ref']
    raise ShortenGenerationError()

def __get_readme_data(self, db_repo):
    repo_name = db_repo.repo_name

    @cache_region('long_term')
    def _get_readme_from_cache(key):
        readme_data = None
        readme_file = None
        log.debug('Looking for README file')
        try:
            # gets the landing revision, or tip if that fails
            cs = db_repo.get_landing_changeset()
            if isinstance(cs, EmptyChangeset):
                raise EmptyRepositoryError()
            renderer = MarkupRenderer()
            for f in README_FILES:
                try:
                    readme = cs.get_node(f)
                    if not isinstance(readme, FileNode):
                        continue
                    readme_file = f
                    log.debug('Found README file `%s` rendering...' % readme_file)
                    readme_data = renderer.render(readme.content, f)
                    break
                except NodeDoesNotExistError:
                    continue
        except ChangesetError:
            log.error(traceback.format_exc())
        except EmptyRepositoryError:
            pass
        except Exception:
            log.error(traceback.format_exc())
        return readme_data, readme_file

    key = repo_name + '_README'
    inv = CacheInvalidation.invalidate(key)
    if inv is not None:
        region_invalidate(_get_readme_from_cache, None, key)
        CacheInvalidation.set_valid(inv.cache_key)
    return _get_readme_from_cache(key)

def invalidateAll(self):
    """Invalidate every cached EPI query for the current user."""
    day1, day2 = self.epiUtility.getObtenirImputacionsDays(self.request, self.username)

    # Pattern: region_invalidate(function referenced by module path, region name,
    #          name of the class the function belongs to, *parameters)
    # region_invalidate(obtenirImputacions, None, 'obtenirImputacions', self.username, day1, day2)
    region_invalidate('epi.operacions.obtenirImputacions', 'default_term',
                      'obtenirImputacions', 'epi.operacions.Operacions',
                      self.username, day1, day2)
    # region_invalidate(obtenirPortalTecnic, None, 'obtenirPortalTecnic', self.username)
    region_invalidate('epi.operacions.obtenirPortalTecnic', 'default_term',
                      'obtenirPortalTecnic', 'epi.operacions.Operacions',
                      self.username)
    # region_invalidate(getMarcatges, None, 'getMarcatges', self.username)
    region_invalidate('epi.presencia.getMarcatges', 'long_term',
                      'getMarcatges', 'epi.presencia.Presencia', self.username)
    # region_invalidate(getPermisos, None, 'getPermisos', self.username)
    region_invalidate('epi.presencia.getPermisos', 'default_term',
                      'getPermisos', 'epi.presencia.Presencia', self.username)

def esborrarImputacio(self, iid, fname='esborrarImputacio'):
    """Delete an existing time entry."""
    self.log("esborrarImputacio")
    self.reloadExternalLoginKey()
    parts = ['timeEntryId=%s' % (iid),
             'externalLoginKey=%s' % (self.externalLoginKey),
             ]
    url = 'https://maul.upc.es:8444/imputacions/control/imputacioEsborrarGraella?' + '&'.join(parts)
    response = self.br.open(url)
    html = response.read()
    if self.checkBrowserExpired(html):
        return 'EXPIRED'
    exitcode = eval(html)
    # Invalidate the cache
    # getUtility(IRAMCache).invalidate('obtenirImputacions')
    day1, day2 = self.epitool.getObtenirImputacionsDays(self.request, self.username)
    region_invalidate('epi.operacions.obtenirImputacions', 'default_term',
                      'obtenirImputacions', 'epi.operacions.Operacions',
                      self.username, day1, day2)
    self.saveSessionData()
    return exitcode

def apply_user_(ticket):
    'Finalize a change to a user account'
    user_ = db.query(User_).filter(
        (User_.ticket == ticket) &
        (User_.when_expired >= datetime.datetime.utcnow())).first()
    if not user_:
        raise UserException('')
    # If the ticket is valid,
    if user_:
        # Apply the change and reset rejection_count
        userID = user_.user_id
        db.merge(User(
            id=userID,
            username=user_.username,
            password_=user_.password_,
            nickname=user_.nickname,
            email=user_.email,
            rejection_count=0,
            code=make_random_string(CODE_LEN)))
        region_invalidate(get_properties, None, userID)
    # Return
    return user_

def generate(self, url):
    # Create unique indexes for lookups in both directions
    self._request.db['urls'].ensure_index('short_ref', unique=True)
    self._request.db['urls'].ensure_index('plain', unique=True)

    pre_fetch = self._request.db['urls'].find_one({'plain': url})
    if pre_fetch is not None:
        return pre_fetch['short_ref']

    attempts = 0
    while attempts < 10:
        weak_short_reference = self._generation_tool.genbase(self._digit_count)
        url_data = {'plain': url, 'short_ref': weak_short_reference}
        try:
            self._request.db['urls'].insert(url_data, safe=True)
        except pymongo.errors.DuplicateKeyError:
            attempts += 1
            continue
        else:
            region_invalidate(self.fetch, None, url_data['short_ref'])
            return url_data['short_ref']
    raise ShortenGenerationError()

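generate() invalidates self.fetch for the newly inserted short_ref so the next lookup re-reads MongoDB instead of returning a stale cached miss. The fetch method itself is not shown in these snippets; a hypothetical version consistent with that invalidation call (the class name, region, and method body are assumptions) might look like:

from beaker.cache import cache_region

class UrlShortener(object):
    def __init__(self, request):
        self._request = request

    @cache_region('long_term')
    def fetch(self, short_ref):
        # Beaker skips `self` when building the key for methods, so this
        # entry is keyed by short_ref alone, matching region_invalidate above.
        doc = self._request.db['urls'].find_one({'short_ref': short_ref})
        return None if doc is None else doc['plain']
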
def invalidate_cached_get_group_package_stuff():
    log.info("Invalidating cache for focus countries")
    bcache.region_invalidate(cached_get_group_package_stuff,
                             'hdx_memory_cache', 'focus_countries_list')

def invalidate(self, x, y):
    region_invalidate(self.go, None, "method", x, y)

            PatentStatus(
                id=int(patentStatusID),
                name=patentStatusName.strip(),
            ))
    for patentTypeID, patentTypeName in patentTypes:
        db.merge(
            PatentType(
                id=int(patentTypeID),
                name=patentTypeName.strip(),
            ))
    for phoneID, contactID, phoneNumber, phoneType in phones:
        db.merge(
            Phone(
                id=int(phoneID),
                contact_id=int(contactID),
                number=phoneNumber.strip(),
                type=phoneType.strip(),
            ))
    for technologyID, technologyCase, technologyName in technologies:
        db.merge(
            Technology(
                id=int(technologyID),
                ref=technologyCase.strip(),
                name=technologyName.strip(),
            ))
    # Record
    db.add(Upload(ip=get_remote_ip(request), when=datetime.datetime.utcnow()))
    # Return
    region_invalidate(get_patents, None)
    return dict(isOk=1)

def invalidate_cached_group_list():
    log.info("Invalidating cache for group list")
    bcache.region_invalidate(cached_group_list, 'hdx_memory_cache',
                             'cached_grp_list')
    bcache.region_invalidate(cached_group_iso_to_title, 'hdx_memory_cache',
                             'cached_grp_iso_to_title')

def invalidate_xml(number):
    number_normalized = normalize_patent(number)
    region_invalidate(get_xml, None, 'get_xml', number_normalized)

def invalidate_cached_organization_list():
    log.info("Invalidating cache for org list")
    bcache.region_invalidate(cached_organization_list, 'hdx_memory_cache',
                             'cached_organization_list')

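The invalidation helpers above each name a cached function, the 'hdx_memory_cache' region, and an extra key string. For the invalidation to hit, the cached function's decorator must declare the same region and extra key; a hypothetical sketch of that pairing (the real cached_group_list lives elsewhere and queries CKAN, this stub only shows how the keys line up):

import beaker.cache as bcache

@bcache.cache_region('hdx_memory_cache', 'cached_grp_list')
def cached_group_list():
    # The real implementation fetches the group list from CKAN; stubbed here.
    return []

# Removes exactly the entry created by the call above:
bcache.region_invalidate(cached_group_list, 'hdx_memory_cache', 'cached_grp_list')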