def _send_confirmation(self, email):
    """Store a 24h verification token for *email* and mail it to the user."""
    storage = GenericCache('confirm-email')
    payload = {'email': email, 'user_id': self.user.id}
    # make_unique_token keeps drawing tokens until one is unused in the cache
    token = make_unique_token(lambda t: not storage.get(t))
    storage.set(token, payload, 24 * 3600)
    template = get_template_module('users/emails/verify_email.txt',
                                   user=self.user, email=email, token=token)
    GenericMailer.send(make_email(email, template=template))
def _cacheNextStartingRecord(self, queryHash, page, record, obj):
    """Remember the first record index of the next results page.

    Merges ``{next_page: first_record}`` into *obj* (or a fresh dict) and
    stores it in the 'Search' cache for 12 hours, keyed by session hash
    and query hash.

    :param queryHash: hash identifying the current search query
    :param page: requested page number (NOTE(review): unused — the code
                 reads ``self._page`` instead; confirm this is intended)
    :param record: index of the last record of the current page
    :param obj: previously cached page->record mapping, if any
    """
    data = obj or {}
    data[self._page + 1] = record + 1
    # lazy %-style args: the message is only formatted if DEBUG is enabled
    Logger.get("search").debug("set page: %s", data)
    GenericCache('Search').set((self._sessionHash, queryHash), data, 12 * 3600)
class WPRoomBookingMapOfRoomsWidget(WPNotDecorated):
    """Page rendering the room-booking map widget, cached per query+language."""

    sidemenu_option = 'map'
    cache = GenericCache('MapOfRooms')

    def getCSSFiles(self):
        css = WPNotDecorated.getCSSFiles(self)
        return css + ['css/mapofrooms.css']

    def getJSFiles(self):
        js = WPNotDecorated.getJSFiles(self)
        return js + self._includeJSPackage('RoomBooking')

    def _getTitle(self):
        return '{} - {}'.format(WPNotDecorated._getTitle(self), _('Map of rooms'))

    def _get_widget_params(self):
        """Collect the template parameters for the default location."""
        loc = Location.default_location
        today = date.today()
        return {
            'aspects': [aspect.to_serializable() for aspect in loc.aspects],
            'buildings': loc.get_buildings(),
            'default_repeat': '{}|0'.format(int(RepeatFrequency.NEVER)),
            'default_start_dt': datetime.combine(today, Location.working_time_start),
            'default_end_dt': datetime.combine(today, Location.working_time_end),
            'repeat_mapping': RepeatMapping.mapping,
        }

    def _getBody(self, params):
        # the cache key covers both the query string and the session language
        cache_key = str(sorted(dict(request.args, lang=session.lang).items()))
        html = self.cache.get(cache_key)
        if html is None:
            params.update(self._get_widget_params())
            html = WTemplated('RoomBookingMapOfRoomsWidget').getHTML(params)
            self.cache.set(cache_key, html, 3600)
        return html
def principal_from_fossil(fossil, allow_pending=False, allow_groups=True, legacy=True, allow_missing_groups=False,
                          allow_emails=False):
    """Gets a GroupWrapper or AvatarUserWrapper from a fossil"""
    from indico.modules.groups import GroupProxy
    from indico.modules.users import User
    kind = fossil['_type']
    principal_id = fossil['id']
    if kind == 'Avatar':
        if isinstance(principal_id, int) or principal_id.isdigit():
            # a regular, registered indico user
            user = User.get(int(principal_id))
        elif allow_pending:
            data = GenericCache('pending_identities').get(principal_id)
            if not data:
                raise ValueError("Cannot find user '{}' in cache".format(principal_id))
            data = {k: ('' if v is None else v) for k, v in data.items()}
            email = data['email'].lower()
            # re-use an existing pending user with the same e-mail if there is one
            user = User.find_first(email=email, is_pending=True)
            if not user:
                user = User(first_name=data.get('first_name') or '',
                            last_name=data.get('last_name') or '',
                            email=email,
                            address=data.get('address', ''),
                            phone=data.get('phone', ''),
                            affiliation=data.get('affiliation', ''),
                            is_pending=True)
                db.session.add(user)
                db.session.flush()
        else:
            raise ValueError("Id '{}' is not a number and allow_pending=False".format(principal_id))
        if user is None:
            raise ValueError('User does not exist: {}'.format(principal_id))
        return user.as_avatar if legacy else user
    elif allow_emails and kind == 'Email':
        return EmailPrincipal(principal_id)
    elif allow_groups and kind in ('LocalGroupWrapper', 'LocalGroup'):
        group = GroupProxy(int(principal_id))
        if group.group is None:
            raise ValueError('Local group does not exist: {}'.format(principal_id))
        return group.as_legacy_group if legacy else group
    elif allow_groups and kind in ('LDAPGroupWrapper', 'MultipassGroup'):
        provider = fossil['provider']
        group = GroupProxy(principal_id, provider)
        if group.group is None and not allow_missing_groups:
            raise ValueError('Multipass group does not exist: {}:{}'.format(provider, principal_id))
        return group.as_legacy_group if legacy else group
    else:
        raise ValueError('Unexpected fossil type: {}'.format(kind))
class RHUserEmailsVerify(RHUserBase):
    """Handle the token link confirming ownership of a new e-mail address."""

    flash_user_status = False
    token_storage = GenericCache('confirm-email')

    def _validate(self, data):
        """Check the token payload; return ``(valid, existing_user_or_None)``."""
        if not data:
            flash(_('The verification token is invalid or expired.'), 'error')
            return False, None
        user = User.get(data['user_id'])
        if not user or user != self.user:
            flash(_('This token is for a different Indico user. Please login with the correct account'), 'error')
            return False, None
        existing = UserEmail.find_first(is_user_deleted=False, email=data['email'])
        if existing and not existing.user.is_pending:
            if existing.user == self.user:
                flash(_('This email address is already attached to your account.'))
            else:
                flash(_('This email address is already in use by another account.'), 'error')
            return False, existing.user
        return True, existing.user if existing else None

    def _process(self):
        token = request.view_args['token']
        data = self.token_storage.get(token)
        valid, existing = self._validate(data)
        if valid:
            self.token_storage.delete(token)
            if existing and existing.is_pending:
                logger.info("Found pending user %s to be merged into %s", existing, self.user)
                # If the pending user has missing names, copy them from the active one
                # to allow it to be marked as not pending and deleted during the merge.
                existing.first_name = existing.first_name or self.user.first_name
                existing.last_name = existing.last_name or self.user.last_name
                merge_users(existing, self.user)
                flash(_("Merged data from existing '{}' identity").format(existing.email))
                existing.is_pending = False
            self.user.secondary_emails.add(data['email'])
            signals.users.email_added.send(self.user, email=data['email'])
            flash(_('The email address {email} has been added to your account.').format(email=data['email']),
                  'success')
        return redirect(url_for('.user_emails'))
def wrapper(*args, **kwargs):
    """Run the wrapped task unless another worker currently holds its lock."""
    cache = GenericCache('task-locks')
    task_name = current_task.name
    # NOTE(review): get/set is not atomic, so two workers starting at the
    # very same moment could both pass this check — confirm acceptable.
    if cache.get(task_name):
        Logger.get('celery').warning('Task {} is locked; not executing it'.format(task_name))
        return
    cache.set(task_name, True)
    try:
        return f(*args, **kwargs)
    finally:
        # always release the lock, even if the task raised
        cache.delete(task_name)
def _get_user_data(self):
    """Prefill personal-data fields from the ``user`` request argument."""
    user_id = request.args.get('user')
    if user_id is None:
        return {}
    if user_id.isdigit():
        # existing indico user
        user = User.find_first(id=user_id, is_deleted=False)
        user_data = {pd_type.name: (getattr(user, pd_type.name, None) if user else '')
                     for pd_type in PersonalDataType}
    else:
        # non-indico user kept in the pending-identities cache
        data = GenericCache('pending_identities').get(user_id, {})
        user_data = {pd_type.name: data.get(pd_type.name) for pd_type in PersonalDataType}
    user_data['title'] = get_title_uuid(self.regform, user_data['title'])
    return user_data
def _getStartingRecord(self, queryHash, page):
    """Return ``(first_record_index, cached_mapping_or_None)`` for *page*."""
    log = Logger.get("search")
    cached = GenericCache('Search').get((self._sessionHash, queryHash), {})
    if page == 1:
        # the first page always starts at record 0
        log.debug("first page")
        return 0, None
    if page in cached:
        # cache hit: resume from the stored record index
        log.debug("hit! %s %s" % (cached[page], cached))
        return cached[page], cached
    # cache miss: force the first page to be loaded
    log.debug("miss")
    self._page = 1
    return 0, None
def has_member(self, user):
    """Check (with 30-minute caching) whether *user* belongs to this group."""
    cache = GenericCache('group-membership')
    cache_key = '{}:{}:{}'.format(self.provider, self.name, user.id)
    cached = cache.get(cache_key)
    if cached is not None:
        return cached
    if self.group is None:
        warn('Tried to check if {} is in invalid group {}'.format(user, self))
        result = False
    else:
        identifiers = user.iter_identifiers(check_providers=True, providers={self.provider})
        result = any(item[1] in self.group for item in identifiers)
    cache.set(cache_key, result, 1800)
    return result
def match(self, criteria, exact=False, onlyActivated=True, searchInAuthenticators=False):
    """Search users matching *criteria*, wrapping external hits as provisional avatars."""
    from indico.modules.users.util import search_users
    cache = GenericCache('pending_identities')

    def _wrap(result):
        if isinstance(result, IdentityInfo):
            # remember external identities so they can be resolved later
            cache.set(result.provider.name + ":" + result.identifier, result.data)
            return AvatarProvisionalWrapper(result)
        return result.as_avatar

    query = {AVATAR_FIELD_MAP[field]: value
             for field, value in criteria.iteritems() if value}
    matches = search_users(exact=exact,
                           include_pending=not onlyActivated,
                           include_deleted=not onlyActivated,
                           external=searchInAuthenticators,
                           **query)
    return [_wrap(result) for result in matches]
class RHUserEmailsVerify(RHUserBase):
    """Handle the token link confirming ownership of a new e-mail address."""

    token_storage = GenericCache('confirm-email')

    def _validate(self, data):
        """Validate the token payload.

        :param data: payload loaded from the token storage (may be falsy)
        :return: ``(valid, existing_user_or_None)`` where the second item is
                 the owner of an already-registered matching e-mail, if any
        """
        if not data:
            flash(_('The verification token is invalid or expired.'), 'error')
            return False, None
        user = User.get(data['user_id'])
        if not user or user != self.user:
            flash(_('This token is for a different Indico user. Please login with the correct account'), 'error')
            return False, None
        existing = UserEmail.find_first(is_user_deleted=False, email=data['email'])
        if existing and not existing.user.is_pending:
            if existing.user == self.user:
                flash(_('This email address is already attached to your account.'))
            else:
                flash(_('This email address is already in use by another account.'), 'error')
            return False, existing.user
        return True, existing.user if existing else None

    def _process(self):
        token = request.view_args['token']
        data = self.token_storage.get(token)
        valid, existing = self._validate(data)
        if valid:
            # token is single-use: drop it as soon as it validated
            self.token_storage.delete(token)
            if existing and existing.is_pending:
                # If the pending user has missing names, copy them from the
                # active one so it can be marked as not pending and deleted
                # cleanly during the merge.
                existing.first_name = existing.first_name or self.user.first_name
                existing.last_name = existing.last_name or self.user.last_name
                flash(_("Merged data from existing '{}' identity").format(existing.email))
                merge_users(existing, self.user)
                existing.is_pending = False
            self.user.secondary_emails.add(data['email'])
            flash(_('The email address {email} has been added to your account.').format(email=data['email']),
                  'success')
        return redirect(url_for('.user_emails'))
def memoize_redis(ttl):
    """Memoize a function in redis

    The cached value can be cleared by calling the method
    ``clear_cached()`` of the decorated function with the same arguments
    that were used during the function call.  To check whether a value
    has been cached call ``is_cached()`` in the same way.

    :param ttl: How long the result should be cached.  May be a
                timedelta or a number (seconds).
    """
    from MaKaC.common.cache import GenericCache
    cache = GenericCache('memoize')

    def decorator(f):
        def _get_key(args, kwargs):
            # Include the module in the key: two functions with the same
            # name in different modules must not share cache entries.
            return f.__module__, f.__name__, make_hashable(args), make_hashable(kwargs)

        def _clear_cached(*args, **kwargs):
            cache.delete(_get_key(args, kwargs))

        def _is_cached(*args, **kwargs):
            return cache.get(_get_key(args, kwargs), _notset) is not _notset

        @wraps(f)
        def memoizer(*args, **kwargs):
            if current_app.config['TESTING'] or current_app.config.get('REPL'):
                # No memoization during tests or in the shell
                return f(*args, **kwargs)
            key = _get_key(args, kwargs)
            value = cache.get(key, _notset)
            if value is _notset:
                value = f(*args, **kwargs)
                cache.set(key, value, ttl)
            return value

        memoizer.clear_cached = _clear_cached
        memoizer.is_cached = _is_cached
        return memoizer

    return decorator
class OAuthGrant(object):
    """OAuth grant token"""

    #: cache entry to store grant tokens
    _cache = GenericCache('oauth-grant-tokens')

    def __init__(self, client_id, code, redirect_uri, user, scopes, expires):
        self.client_id = client_id
        self.code = code
        self.redirect_uri = redirect_uri
        self.user = user
        self.scopes = scopes
        self.expires = expires

    @property
    def key(self):
        """Cache key identifying this grant."""
        return self.make_key(self.client_id, self.code)

    @property
    def ttl(self):
        """Remaining lifetime of the grant."""
        return self.expires - datetime.utcnow()

    @classmethod
    def get(cls, client_id, code):
        """Load a grant from the cache (``None`` if missing/expired)."""
        return cls._cache.get(cls.make_key(client_id, code))

    @classmethod
    def make_key(cls, client_id, code):
        return '{}:{}'.format(client_id, code)

    def delete(self):
        self._cache.delete(self.key)

    def save(self):
        self._cache.set(key=self.key, val=self, time=self.ttl)
# You should have received a copy of the GNU General Public License # along with Indico; if not, see <http://www.gnu.org/licenses/>. import re from persistent import Persistent from sqlalchemy.orm import load_only, noload from indico.core.config import Config from indico.util.caching import memoize_request from MaKaC.common import filters from MaKaC.common.cache import GenericCache from MaKaC.common.ObjectHolders import ObjectHolder from MaKaC.common.Locators import Locator _cache = GenericCache('room-mapper') class RoomMapperHolder(ObjectHolder): """ """ idxName = "roomsMapping" counterName = "ROOMS_MAPPING" def match(self, criteria, exact=False): crit = {} for f, v in criteria.items(): crit[f] = [v] if crit.has_key("roommappername"): crit["name"] = crit["roommappername"] f = RoomMapperFilter(_RoomMapperFilterCriteria(crit), None)
def __init__(self, duration=DEFAULT_CACHE_TTL):
    """Create a suds cache wrapper keeping entries for *duration* seconds."""
    self._duration = duration
    self._cache = GenericCache("SudsCache")
# General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Indico; if not, see <http://www.gnu.org/licenses/>. import posixpath from io import BytesIO from flask import redirect from indico.core.config import Config from indico.web.flask.util import send_file from indico.modules.rb.models.rooms import Room from indico.modules.rb.models.photos import Photo from MaKaC.common.cache import GenericCache _cache = GenericCache('Rooms') def _redirect_no_photo(size): return redirect( posixpath.join(Config.getInstance().getImagesBaseURL(), 'rooms/{}_photos/NoPhoto.jpg'.format(size))) def room_photo(roomID, size, **kw): cache_key = 'photo-{}-{}'.format(roomID, size) photo_data = _cache.get(cache_key) if photo_data == '*': return _redirect_no_photo(size) elif photo_data is None:
def handler(req, **params): ContextManager.destroy() logger = Logger.get('httpapi') path, query = req.URLFields['PATH_INFO'], req.URLFields['QUERY_STRING'] if req.method == 'POST': # Convert POST data to a query string queryParams = dict(req.form) for key, value in queryParams.iteritems(): queryParams[key] = [str(value)] query = urllib.urlencode(remove_lists(queryParams)) else: # Parse the actual query string queryParams = parse_qs(query) dbi = DBMgr.getInstance() dbi.startRequest() minfo = HelperMaKaCInfo.getMaKaCInfoInstance() if minfo.getRoomBookingModuleActive(): Factory.getDALManager().connect() apiKey = get_query_parameter(queryParams, ['ak', 'apikey'], None) cookieAuth = get_query_parameter(queryParams, ['ca', 'cookieauth'], 'no') == 'yes' signature = get_query_parameter(queryParams, ['signature']) timestamp = get_query_parameter(queryParams, ['timestamp'], 0, integer=True) noCache = get_query_parameter(queryParams, ['nc', 'nocache'], 'no') == 'yes' pretty = get_query_parameter(queryParams, ['p', 'pretty'], 'no') == 'yes' onlyPublic = get_query_parameter(queryParams, ['op', 'onlypublic'], 'no') == 'yes' onlyAuthed = get_query_parameter(queryParams, ['oa', 'onlyauthed'], 'no') == 'yes' # Get our handler function and its argument and response type hook, dformat = HTTPAPIHook.parseRequest(path, queryParams) if hook is None or dformat is None: raise apache.SERVER_RETURN, apache.HTTP_NOT_FOUND # Disable caching if we are not just retrieving data (or the hook requires it) if req.method == 'POST' or hook.NO_CACHE: noCache = True ak = error = result = None ts = int(time.time()) typeMap = {} try: session = None if cookieAuth: session = getSessionForReq(req) if not session.getUser(): # ignore guest sessions session = None if apiKey or not session: # Validate the API key (and its signature) ak, enforceOnlyPublic = checkAK(apiKey, signature, timestamp, path, query) if enforceOnlyPublic: onlyPublic = True # Create an access wrapper for the API key's user aw = buildAW(ak, 
req, onlyPublic) # Get rid of API key in cache key if we did not impersonate a user if ak and aw.getUser() is None: cacheKey = normalizeQuery(path, query, remove=('ak', 'apiKey', 'signature', 'timestamp', 'nc', 'nocache', 'oa', 'onlyauthed')) else: cacheKey = normalizeQuery(path, query, remove=('signature', 'timestamp', 'nc', 'nocache', 'oa', 'onlyauthed')) if signature: # in case the request was signed, store the result under a different key cacheKey = 'signed_' + cacheKey else: # We authenticated using a session cookie. if Config.getInstance().getCSRFLevel() >= 2: token = req.headers_in.get('X-CSRF-Token', get_query_parameter(queryParams, ['csrftoken'])) if session.csrf_token != token: raise HTTPAPIError('Invalid CSRF token', apache.HTTP_FORBIDDEN) aw = AccessWrapper() if not onlyPublic: aw.setUser(session.getUser()) userPrefix = 'user-' + session.getUser().getId() + '_' cacheKey = userPrefix + normalizeQuery(path, query, remove=('nc', 'nocache', 'ca', 'cookieauth', 'oa', 'onlyauthed', 'csrftoken')) # Bail out if the user requires authentication but is not authenticated if onlyAuthed and not aw.getUser(): raise HTTPAPIError('Not authenticated', apache.HTTP_FORBIDDEN) obj = None addToCache = not hook.NO_CACHE cache = GenericCache('HTTPAPI') cacheKey = RE_REMOVE_EXTENSION.sub('', cacheKey) if not noCache: obj = cache.get(cacheKey) if obj is not None: result, extra, ts, complete, typeMap = obj addToCache = False if result is None: # Perform the actual exporting res = hook(aw, req) if isinstance(res, tuple) and len(res) == 4: result, extra, complete, typeMap = res else: result, extra, complete, typeMap = res, {}, True, {} if result is not None and addToCache: ttl = HelperMaKaCInfo.getMaKaCInfoInstance().getAPICacheTTL() cache.set(cacheKey, (result, extra, ts, complete, typeMap), ttl) except HTTPAPIError, e: error = e if e.getCode(): req.status = e.getCode() if req.status == apache.HTTP_METHOD_NOT_ALLOWED: req.headers_out['Allow'] = 'GET' if req.method == 'POST' else 
'POST'
def handler(prefix, path): path = posixpath.join('/', prefix, path) ContextManager.destroy() clearCache() # init fossil cache logger = Logger.get('httpapi') if request.method == 'POST': # Convert POST data to a query string queryParams = [(key, [x.encode('utf-8') for x in values]) for key, values in request.form.iterlists()] query = urllib.urlencode(queryParams, doseq=1) # we only need/keep multiple values so we can properly validate the signature. # the legacy code below expects a dict with just the first value. # if you write a new api endpoint that needs multiple values get them from # ``request.values.getlist()`` directly queryParams = {key: values[0] for key, values in queryParams} else: # Parse the actual query string queryParams = dict((key, value.encode('utf-8')) for key, value in request.args.iteritems()) query = request.query_string dbi = DBMgr.getInstance() dbi.startRequest() apiKey = get_query_parameter(queryParams, ['ak', 'apikey'], None) cookieAuth = get_query_parameter(queryParams, ['ca', 'cookieauth'], 'no') == 'yes' signature = get_query_parameter(queryParams, ['signature']) timestamp = get_query_parameter(queryParams, ['timestamp'], 0, integer=True) noCache = get_query_parameter(queryParams, ['nc', 'nocache'], 'no') == 'yes' pretty = get_query_parameter(queryParams, ['p', 'pretty'], 'no') == 'yes' onlyPublic = get_query_parameter(queryParams, ['op', 'onlypublic'], 'no') == 'yes' onlyAuthed = get_query_parameter(queryParams, ['oa', 'onlyauthed'], 'no') == 'yes' scope = 'read:legacy_api' if request.method == 'GET' else 'write:legacy_api' try: oauth_valid, oauth_request = oauth.verify_request([scope]) if not oauth_valid and oauth_request and oauth_request.error_message != 'Bearer token not found.': raise BadRequest('OAuth error: {}'.format( oauth_request.error_message)) elif g.get( 'received_oauth_token' ) and oauth_request.error_message == 'Bearer token not found.': raise BadRequest('OAuth error: Invalid token') except ValueError: # XXX: Dirty hack 
to workaround a bug in flask-oauthlib that causes it # not to properly urlencode request query strings # Related issue (https://github.com/lepture/flask-oauthlib/issues/213) oauth_valid = False # Get our handler function and its argument and response type hook, dformat = HTTPAPIHook.parseRequest(path, queryParams) if hook is None or dformat is None: raise NotFound # Disable caching if we are not just retrieving data (or the hook requires it) if request.method == 'POST' or hook.NO_CACHE: noCache = True ak = error = result = None ts = int(time.time()) typeMap = {} responseUtil = ResponseUtil() is_response = False try: used_session = None if cookieAuth: used_session = session if not used_session.user: # ignore guest sessions used_session = None if apiKey or oauth_valid or not used_session: if not oauth_valid: # Validate the API key (and its signature) ak, enforceOnlyPublic = checkAK(apiKey, signature, timestamp, path, query) if enforceOnlyPublic: onlyPublic = True # Create an access wrapper for the API key's user aw = buildAW(ak, onlyPublic) else: # Access Token (OAuth) at = load_token(oauth_request.access_token.access_token) aw = buildAW(at, onlyPublic) # Get rid of API key in cache key if we did not impersonate a user if ak and aw.getUser() is None: cacheKey = normalizeQuery( path, query, remove=('_', 'ak', 'apiKey', 'signature', 'timestamp', 'nc', 'nocache', 'oa', 'onlyauthed')) else: cacheKey = normalizeQuery(path, query, remove=('_', 'signature', 'timestamp', 'nc', 'nocache', 'oa', 'onlyauthed')) if signature: # in case the request was signed, store the result under a different key cacheKey = 'signed_' + cacheKey else: # We authenticated using a session cookie. 
if Config.getInstance().getCSRFLevel() >= 2: token = request.headers.get( 'X-CSRF-Token', get_query_parameter(queryParams, ['csrftoken'])) if used_session.csrf_protected and used_session.csrf_token != token: raise HTTPAPIError('Invalid CSRF token', 403) aw = AccessWrapper() if not onlyPublic: aw.setUser(used_session.avatar) userPrefix = 'user-{}_'.format(used_session.user.id) cacheKey = userPrefix + normalizeQuery( path, query, remove=('_', 'nc', 'nocache', 'ca', 'cookieauth', 'oa', 'onlyauthed', 'csrftoken')) # Bail out if the user requires authentication but is not authenticated if onlyAuthed and not aw.getUser(): raise HTTPAPIError('Not authenticated', 403) addToCache = not hook.NO_CACHE cache = GenericCache('HTTPAPI') cacheKey = RE_REMOVE_EXTENSION.sub('', cacheKey) if not noCache: obj = cache.get(cacheKey) if obj is not None: result, extra, ts, complete, typeMap = obj addToCache = False if result is None: ContextManager.set("currentAW", aw) # Perform the actual exporting res = hook(aw) if isinstance(res, current_app.response_class): addToCache = False is_response = True result, extra, complete, typeMap = res, {}, True, {} elif isinstance(res, tuple) and len(res) == 4: result, extra, complete, typeMap = res else: result, extra, complete, typeMap = res, {}, True, {} if result is not None and addToCache: ttl = api_settings.get('cache_ttl') if ttl > 0: cache.set(cacheKey, (result, extra, ts, complete, typeMap), ttl) except HTTPAPIError, e: error = e if e.getCode(): responseUtil.status = e.getCode() if responseUtil.status == 405: responseUtil.headers[ 'Allow'] = 'GET' if request.method == 'POST' else 'POST'
def __init__(self):
    """Initialize session storage backed by the 'flask-session' cache bucket."""
    self.storage = GenericCache('flask-session')
def __init__(self, duration=None):
    """Create a suds cache wrapper; *duration* is the TTL in seconds."""
    self._cache = GenericCache("SudsCache")
    # default TTL: one day
    self._duration = 24 * 3600 if duration is None else duration
def __init__(self, *args, **kwargs):
    """Set up the room-booking request handler with its 'MapOfRooms' cache."""
    RHRoomBookingBase.__init__(self, *args, **kwargs)
    self._cache = GenericCache('MapOfRooms')
def handler(prefix, path): path = posixpath.join('/', prefix, path) ContextManager.destroy() clearCache() # init fossil cache logger = Logger.get('httpapi') if request.method == 'POST': # Convert POST data to a query string queryParams = dict((key, value.encode('utf-8')) for key, value in request.form.iteritems()) query = urllib.urlencode(queryParams) else: # Parse the actual query string queryParams = dict((key, value.encode('utf-8')) for key, value in request.args.iteritems()) query = request.query_string dbi = DBMgr.getInstance() dbi.startRequest() apiKey = get_query_parameter(queryParams, ['ak', 'apikey'], None) cookieAuth = get_query_parameter(queryParams, ['ca', 'cookieauth'], 'no') == 'yes' signature = get_query_parameter(queryParams, ['signature']) timestamp = get_query_parameter(queryParams, ['timestamp'], 0, integer=True) noCache = get_query_parameter(queryParams, ['nc', 'nocache'], 'no') == 'yes' pretty = get_query_parameter(queryParams, ['p', 'pretty'], 'no') == 'yes' onlyPublic = get_query_parameter(queryParams, ['op', 'onlypublic'], 'no') == 'yes' onlyAuthed = get_query_parameter(queryParams, ['oa', 'onlyauthed'], 'no') == 'yes' oauthToken = 'oauth_token' in queryParams # Check if OAuth data is supplied in the Authorization header if not oauthToken and request.headers.get('Authorization') is not None: oauthToken = 'oauth_token' in request.headers.get('Authorization') # Get our handler function and its argument and response type hook, dformat = HTTPAPIHook.parseRequest(path, queryParams) if hook is None or dformat is None: raise NotFound # Disable caching if we are not just retrieving data (or the hook requires it) if request.method == 'POST' or hook.NO_CACHE: noCache = True ak = error = result = None ts = int(time.time()) typeMap = {} responseUtil = ResponseUtil() try: used_session = None if cookieAuth: used_session = session if not used_session.avatar: # ignore guest sessions used_session = None if apiKey or oauthToken or not used_session: if not 
oauthToken: # Validate the API key (and its signature) ak, enforceOnlyPublic = checkAK(apiKey, signature, timestamp, path, query) if enforceOnlyPublic: onlyPublic = True # Create an access wrapper for the API key's user aw = buildAW(ak, onlyPublic) else: # Access Token (OAuth) at = OAuthUtils.OAuthCheckAccessResource() aw = buildAW(at, onlyPublic) # Get rid of API key in cache key if we did not impersonate a user if ak and aw.getUser() is None: cacheKey = normalizeQuery( path, query, remove=('_', 'ak', 'apiKey', 'signature', 'timestamp', 'nc', 'nocache', 'oa', 'onlyauthed')) else: cacheKey = normalizeQuery(path, query, remove=('_', 'signature', 'timestamp', 'nc', 'nocache', 'oa', 'onlyauthed')) if signature: # in case the request was signed, store the result under a different key cacheKey = 'signed_' + cacheKey else: # We authenticated using a session cookie. if Config.getInstance().getCSRFLevel() >= 2: token = request.headers.get( 'X-CSRF-Token', get_query_parameter(queryParams, ['csrftoken'])) if used_session.csrf_protected and used_session.csrf_token != token: raise HTTPAPIError('Invalid CSRF token', 403) aw = AccessWrapper() if not onlyPublic: aw.setUser(used_session.avatar) userPrefix = 'user-' + used_session.avatar.getId() + '_' cacheKey = userPrefix + normalizeQuery( path, query, remove=('_', 'nc', 'nocache', 'ca', 'cookieauth', 'oa', 'onlyauthed', 'csrftoken')) # Bail out if the user requires authentication but is not authenticated if onlyAuthed and not aw.getUser(): raise HTTPAPIError('Not authenticated', 403) addToCache = not hook.NO_CACHE cache = GenericCache('HTTPAPI') cacheKey = RE_REMOVE_EXTENSION.sub('', cacheKey) if not noCache: obj = cache.get(cacheKey) if obj is not None: result, extra, ts, complete, typeMap = obj addToCache = False if result is None: ContextManager.set("currentAW", aw) # Perform the actual exporting res = hook(aw) if isinstance(res, tuple) and len(res) == 4: result, extra, complete, typeMap = res else: result, extra, complete, 
typeMap = res, {}, True, {} if result is not None and addToCache: ttl = api_settings.get('cache_ttl') cache.set(cacheKey, (result, extra, ts, complete, typeMap), ttl) except HTTPAPIError, e: error = e if e.getCode(): responseUtil.status = e.getCode() if responseUtil.status == 405: responseUtil.headers[ 'Allow'] = 'GET' if request.method == 'POST' else 'POST'
def principal_from_fossil(fossil, allow_pending=False, allow_groups=True, legacy=True, allow_missing_groups=False,
                          allow_emails=False, allow_networks=False):
    """Resolve a principal fossil to a user, group, IP network group or e-mail principal."""
    from indico.modules.networks.models.networks import IPNetworkGroup
    from indico.modules.groups import GroupProxy
    from indico.modules.users import User
    kind = fossil['_type']
    principal_id = fossil['id']
    if kind == 'Avatar':
        if isinstance(principal_id, int) or principal_id.isdigit():
            # regular user
            user = User.get(int(principal_id))
        elif allow_pending:
            data = GenericCache('pending_identities').get(principal_id)
            if not data:
                raise ValueError("Cannot find user '{}' in cache".format(principal_id))
            data = {k: ('' if v is None else v) for k, v in data.items()}
            email = data['email'].lower()
            # check if there is not already a (pending) user with that e-mail
            # we need to check for non-pending users too since the search may
            # show a user from external results even though the email belongs
            # to an indico account in case some of the search criteria did not
            # match the indico account
            user = User.find_first(User.all_emails.contains(email), ~User.is_deleted)
            if not user:
                user = User(first_name=data.get('first_name') or '',
                            last_name=data.get('last_name') or '',
                            email=email,
                            address=data.get('address', ''),
                            phone=data.get('phone', ''),
                            affiliation=data.get('affiliation', ''),
                            is_pending=True)
                db.session.add(user)
                db.session.flush()
        else:
            raise ValueError("Id '{}' is not a number and allow_pending=False".format(principal_id))
        if user is None:
            raise ValueError('User does not exist: {}'.format(principal_id))
        return user.as_avatar if legacy else user
    if allow_emails and kind == 'Email':
        return EmailPrincipal(principal_id)
    if allow_networks and kind == 'IPNetworkGroup':
        net_group = IPNetworkGroup.get(int(principal_id))
        if net_group is None:
            raise ValueError('IP network group does not exist: {}'.format(principal_id))
        return net_group
    if allow_groups and kind in ('LocalGroupWrapper', 'LocalGroup'):
        group = GroupProxy(int(principal_id))
        if group.group is None:
            raise ValueError('Local group does not exist: {}'.format(principal_id))
        return group.as_legacy_group if legacy else group
    if allow_groups and kind in ('LDAPGroupWrapper', 'MultipassGroup'):
        provider = fossil['provider']
        group = GroupProxy(principal_id, provider)
        if group.group is None and not allow_missing_groups:
            raise ValueError('Multipass group does not exist: {}:{}'.format(provider, principal_id))
        return group.as_legacy_group if legacy else group
    raise ValueError('Unexpected fossil type: {}'.format(kind))
def send_login_info(user, event=None):
    """E-mail password-reset links (or external-account info) for each identity."""
    token_storage = GenericCache('resetpass')
    endpoint = 'event.confLogin-resetPassword' if event else 'user.signIn-resetPassword'
    logins = []
    for identity in user.getIdentityList():
        if not hasattr(identity, 'setPassword'):
            # externally managed account: there is no password we can reset
            config = Config.getInstance()
            extra_message = config.getAuthenticatorConfigById(identity.getAuthenticatorTag()).get("ResetPasswordMessage")
            msg = _("Sorry, you are using an externally managed account (%s) to login into Indico.") % identity.getLogin()
            if extra_message:
                msg += "\n" + extra_message
            logins.append({'tag': identity.getAuthenticatorTag(),
                           'login': identity.getLogin(),
                           'error': msg})
        else:
            tag = identity.getAuthenticatorTag()
            login = identity.getLogin()
            # draw random tokens until we find an unused one
            token = str(uuid.uuid4())
            while token_storage.get(token):
                token = str(uuid.uuid4())
            token_storage.set(token, {'tag': tag, 'login': login}, 6 * 3600)
            url = url_for(endpoint, event, token=token, _external=True, _secure=True)
            logins.append({'tag': tag, 'login': login, 'link': url})
    if not logins:
        url = urlHandlers.UHUserDetails.getURL(user)
        text = _("Sorry, we did not find your login.\nPlease, create one here:\n%s") % url
    else:
        text = _("You can use the following links within the next six hours to reset your password.")
        for entry in logins:
            text += "\n\n==================\n"
            if 'link' in entry:
                text += _("Click below to reset your password for the %s login '%s':\n") % (entry['tag'], entry['login'])
                text += entry['link']
            else:
                text += entry['error']
            text += "\n==================\n"
    maildata = {"fromAddr": "Indico Mailer <%s>" % Config.getInstance().getNoReplyEmail(),
                "toList": [user.getEmail()],
                "subject": _("[%s] Login Information") % getSubjectIndicoTitle(),
                "body": text}
    GenericMailer.send(GenericNotification(maildata))
from werkzeug.urls import url_parse
from indico.core import signals
from indico.core.config import Config
from indico.core.db import db
from indico.modules.events.layout import layout_settings
from indico.modules.events.layout.models.menu import MenuEntry, MenuEntryType, TransientMenuEntry
from indico.util.caching import memoize_request
from indico.util.signals import named_objects_from_signal
from indico.util.string import crc32, return_ascii
from indico.web.flask.util import url_for
import MaKaC
from MaKaC.common.cache import GenericCache

# Module-level cache bucket for menu data; presumably holds menus that
# have already been brought up to date — TODO confirm against callers.
_cache = GenericCache('updated-menus')


def _menu_entry_key(entry_data):
    # Sort key for menu entries: entries with position == -1 sort after
    # explicitly positioned ones (True > False), then by position, with
    # the entry name as the final tie-breaker.
    return entry_data.position == -1, entry_data.position, entry_data.name


@memoize_request
def get_menu_entries_from_signal():
    # Collect menu-entry definitions contributed through the
    # `signals.event.sidemenu` signal, keyed as named objects; memoized
    # so the signal fires at most once per request.
    return named_objects_from_signal(signals.event.sidemenu.send(), plugin_attr='plugin')


def build_menu_entry_name(name, plugin=None):
    """ Builds the proper name for a menu entry.
        '1': _('Yes')
    }}),
    ('checked_in_date', {'title': _('Check-in date'),
                         'id': 'checked_in_date'})
])

# Default registrant-list configuration: visible columns and empty filters
DEFAULT_REPORT_CONFIG = {
    'items': ('title', 'email', 'affiliation', 'reg_date', 'state'),
    'filters': {'fields': {}, 'items': {}}
}

# Cache bucket storing per-user registration-list configurations
cache = GenericCache('reglist-config')


def _get_filters_from_request(regform):
    # Build the active filter dict from the submitted form data, starting
    # from a (deep-copied) empty default so the shared constant is never
    # mutated.
    filters = deepcopy(DEFAULT_REPORT_CONFIG['filters'])
    # Filters for regform fields that have a closed set of choices
    for field in regform.form_items:
        if field.is_field and field.input_type in {'single_choice', 'multi_choice', 'country', 'bool', 'checkbox'}:
            options = request.form.getlist('field_{}'.format(field.id))
            if options:
                filters['fields'][field.id] = options
    # Filters for the special (non-regform) columns that declare choices
    for item in SPECIAL_COLUMN_LABELS.itervalues():
        if item.get('filter_choices'):
            options = request.form.getlist('field_{}'.format(item['id']))
            if options:
                filters['items'][item['id']] = options
    return filters
def _cache(self):
    """Return the cache bucket holding upcoming-events data."""
    upcoming_events_cache = GenericCache('UpcomingEvents')
    return upcoming_events_cache
def __init__(self, function):
    """Wrap *function* and attach a statistics cache bucket for its plugin."""
    self._function = function
    # One cache bucket per implementation: the bucket name is derived
    # from the plugin segment of the wrapped function's module path.
    plugin_name = function.__module__.split('.')[3]
    self._cache = GenericCache(plugin_name + 'StatisticsCache')
def handler(req, **params): ContextManager.destroy() logger = Logger.get('httpapi') path, query = req.URLFields['PATH_INFO'], req.URLFields['QUERY_STRING'] if req.method == 'POST': # Convert POST data to a query string queryParams = dict(req.form) for key, value in queryParams.iteritems(): queryParams[key] = [str(value)] query = urllib.urlencode(remove_lists(queryParams)) else: # Parse the actual query string queryParams = parse_qs(query) dbi = DBMgr.getInstance() dbi.startRequest() minfo = HelperMaKaCInfo.getMaKaCInfoInstance() if minfo.getRoomBookingModuleActive(): Factory.getDALManager().connect() mode = path.split('/')[1] apiKey = get_query_parameter(queryParams, ['ak', 'apikey'], None) signature = get_query_parameter(queryParams, ['signature']) timestamp = get_query_parameter(queryParams, ['timestamp'], 0, integer=True) no_cache = get_query_parameter(queryParams, ['nc', 'nocache'], 'no') == 'yes' pretty = get_query_parameter(queryParams, ['p', 'pretty'], 'no') == 'yes' onlyPublic = get_query_parameter(queryParams, ['op', 'onlypublic'], 'no') == 'yes' # Disable caching if we are not exporting if mode != 'export': no_cache = True # Get our handler function and its argument and response type func, dformat = HTTPAPIHook.parseRequest(path, queryParams) if func is None or dformat is None: raise apache.SERVER_RETURN, apache.HTTP_NOT_FOUND ak = error = result = None ts = int(time.time()) typeMap = {} try: # Validate the API key (and its signature) ak, enforceOnlyPublic = checkAK(apiKey, signature, timestamp, path, query) if enforceOnlyPublic: onlyPublic = True # Create an access wrapper for the API key's user aw = buildAW(ak, req, onlyPublic) # Get rid of API key in cache key if we did not impersonate a user if ak and aw.getUser() is None: cache_key = normalizeQuery(path, query, remove=('ak', 'apiKey', 'signature', 'timestamp', 'nc', 'nocache')) else: cache_key = normalizeQuery(path, query, remove=('signature', 'timestamp', 'nc', 'nocache')) if signature: # in case 
the request was signed, store the result under a different key cache_key = 'signed_' + cache_key obj = None addToCache = True cache = GenericCache('HTTPAPI') cache_key = RE_REMOVE_EXTENSION.sub('', cache_key) if not no_cache: obj = cache.get(cache_key) if obj is not None: result, extra, ts, complete, typeMap = obj addToCache = False if result is None: # Perform the actual exporting res = func(aw, req) if isinstance(res, tuple) and len(res) == 4: result, extra, complete, typeMap = res else: result, extra, complete, typeMap = res, {}, True, {} if result is not None and addToCache: ttl = HelperMaKaCInfo.getMaKaCInfoInstance().getAPICacheTTL() cache.set(cache_key, (result, extra, ts, complete, typeMap), ttl) except HTTPAPIError, e: error = e if e.getCode(): req.status = e.getCode() if req.status == apache.HTTP_METHOD_NOT_ALLOWED: req.headers_out[ 'Allow'] = 'GET' if req.method == 'POST' else 'POST'