def reparent(self, item, dest, skey, *args, **kwargs):
    """
    Moves an entry *item* (and everything beneath it) to another parent-node *dest*.

    .. seealso:: :func:`canReparent`

    :param item: URL-safe key of the item which will be moved.
    :type item: str
    :param dest: URL-safe key of the new parent for this item.
    :type dest: str

    :returns: A rendered success result generated by the default renderer.

    :raises: :exc:`server.errors.NotFound`, when no entry with the given *id* was found.
    :raises: :exc:`server.errors.Unauthorized`, if the current user does not have the required permissions.
    :raises: :exc:`server.errors.PreconditionFailed`, if the *skey* could not be verified.
    """
    if not securitykey.validate(skey, acceptSessionKey=True):
        raise errors.PreconditionFailed()
    if not self.canReparent(item, dest):
        raise errors.Unauthorized()
    if not self.isValidParent(dest) or item == dest:
        raise errors.NotAcceptable()
    ## Test for recursion
    # Walk upwards from *dest* towards the root. If we encounter *item* on
    # the way, *dest* is a descendant of *item* and the move would create a
    # cycle -> isValid stays False and we refuse the operation. The loop is
    # bounded to 99 levels as a safety net against broken parent chains.
    isValid = False
    currLevel = db.Get(dest)
    for x in range(0, 99):
        if str(currLevel.key()) == item:
            break
        if currLevel.key().kind() == self.viewSkel().kindName + "_rootNode":
            # We reached a rootNode
            isValid = True
            break
        currLevel = db.Get(currLevel["parententry"])
    if not isValid:
        raise errors.NotAcceptable()
    ## Update entry
    fromItem = db.Get(item)
    fromItem["parententry"] = dest
    fromItem["parentrepo"] = str(self.getRootNode(dest).key())
    db.Put(fromItem)
    skel = self.editSkel()
    assert skel.fromDB(item)
    # Notify hooks about the structural change.
    self.onItemReparent(skel)
    self.onItemChanged(skel)
    return self.render.reparentSuccess(obj=fromItem)
def assignBillSequence(self, orderKey):
    """
    Assigns a unique, sequential bill number ("idx") to the given order.

    Two separate transactions are used: the first atomically increments the
    shared sequence counter, the second writes the resulting number onto
    the order. Finally :meth:`billSequenceAvailable` is notified.

    :param orderKey: Urlsafe key of the order to number.
    :type orderKey: str
    """

    def getKeyTxn(kindName, orderKey):
        """Generates and returns a new, unique Key"""
        # Shared counter entity; created with count=1000 on first use.
        seqObj = db.GetOrInsert(kindName, "viur_bill_sequences", count=1000)
        idx = seqObj["count"]
        seqObj["count"] += 1
        db.Put(seqObj)
        return str(idx)

    def setKeyTxn(orderKey, idx):
        """Assigns the new bill number *idx* to the given order"""
        dbObj = db.Get(db.Key(orderKey))
        if not dbObj:
            return
        dbObj["idx"] = idx
        db.Put(dbObj)

    # Bail out early if the order does not exist, so we don't consume a
    # sequence number for nothing.
    dbObj = db.Get(db.Key(orderKey))
    if not dbObj:
        return
    idx = db.RunInTransaction(getKeyTxn, self.viewSkel().kindName, orderKey)
    db.RunInTransaction(setKeyTxn, orderKey, idx)
    self.billSequenceAvailable(orderKey)
def getEntry(self, module, id, key=None):
    """
    Export-API: fetches a single datastore entity and returns it pickled.

    :param module: Name of the module the entry belongs to (unused here).
    :param id: Datastore key of the entry to export.
    :param key: Export access key; checked against the configured secret.
    :raises: :exc:`errors.Forbidden` if the access key is invalid.
    """
    if self._checkKey(key, export=True):
        entity = db.Get(id)
        return pickle.dumps(self.genDict(entity))
    raise errors.Forbidden()
def setKeyTxn(orderKey, idx):
    """Stores the freshly generated bill number *idx* on the given order."""
    orderObj = db.Get(db.Key(orderKey))
    if orderObj:
        orderObj["idx"] = idx
        db.Put(orderObj)
def sofortStatus(self, *args, **kwargs):
    """
    Callback endpoint for Sofort payment-status notifications.

    Recomputes the notification hash from the documented field order plus
    the shared notification password, validates it against the supplied
    ``hash`` parameter, then cross-checks order existence and amount before
    marking the order as payed.

    :returns: A short status string consumed by the Sofort backend.
    """
    sortOrder = [
        "transaction", "user_id", "project_id", "sender_holder",
        "sender_account_number", "sender_bank_code", "sender_bank_name",
        "sender_bank_bic", "sender_iban", "sender_country_id",
        "recipient_holder", "recipient_account_number",
        "recipient_bank_code", "recipient_bank_name", "recipient_bank_bic",
        "recipient_iban", "recipient_country_id",
        "international_transaction", "amount", "currency_id", "reason_1",
        "reason_2", "security_criteria", "user_variable_0",
        "user_variable_1", "user_variable_2", "user_variable_3",
        "user_variable_4", "user_variable_5", "created"
    ]
    hashstr = "|".join([kwargs[key] for key in sortOrder]
                       + [conf["sofort"]["notificationpassword"]])
    # Compute the digest once (the original computed it twice: once for the
    # comparison and again inside the error log message).
    expectedHash = hashlib.sha512(hashstr.encode("utf-8")).hexdigest()
    if expectedHash != kwargs["hash"]:
        logging.error("RECIVED INVALID HASH FOR sofort (%s!=%s)" %
                      (expectedHash, kwargs["hash"]))
        return ("INVALID HASH")
    order = db.Get(db.Key(kwargs["user_variable_0"]))
    if not order:
        logging.error("RECIVED UNKNOWN ORDER by sofort (%s)" %
                      (kwargs["user_variable_0"]))
        return ("UNKNOWN ORDER")
    if ("%.2f" % order["price"]) != kwargs["amount"]:
        logging.error("RECIVED INVALID AMOUNT PAYED sofort (%s!=%s)" %
                      (order["price"], kwargs["amount"]))
        return ("INVALID AMOUNT")
    self.orderHandler.setPayed(kwargs["user_variable_0"])
    return ("OKAY")
def updateTransaction(userKey, idx):
    """Persists the observed OTP clock-drift (clamped to +/-0.3) for a user."""
    user = db.Get(userKey)
    if "otptimedrift" not in user.keys() or not isinstance(user["otptimedrift"], float):
        # Initialize missing or malformed drift values.
        user["otptimedrift"] = 0.0
    adjustment = min(max(0.1 * idx, -0.3), 0.3)
    user["otptimedrift"] += adjustment
    db.Put(user)
def setIndex(self, item, index, skey, *args, **kwargs):
    """
    Changes the order of the elements in the current level by changing
    the index of *item*.

    .. seealso:: :func:`canSetIndex`

    :param item: URL-safe key of the item which index should be changed.
    :type item: str
    :param index: New index for this item. This value must be cast-able to float.
    :type index: str

    :returns: A rendered success result generated by the default renderer.

    :raises: :exc:`server.errors.NotFound`, when no entry with the given *key* was found.
    :raises: :exc:`server.errors.Unauthorized`, if the current user does not have the required permissions.
    :raises: :exc:`server.errors.PreconditionFailed`, if the *skey* could not be verified.
    """
    if not securitykey.validate(skey, acceptSessionKey=True):
        raise errors.PreconditionFailed()
    if not self.canSetIndex(item, index):
        raise errors.Unauthorized()
    entry = db.Get(item)
    entry["sortindex"] = float(index)
    db.Put(entry)
    # Reload through the skeleton so the change hooks see current data.
    skel = self.editSkel()
    assert skel.fromDB(item)
    self.onItemSetIndex(skel)
    self.onItemChanged(skel)
    return self.render.setIndexSuccess(obj=entry)
def validate(key, acceptSessionKey=False):
    """
    Validates a onetime securitykey.

    :type key: str
    :param key: The key to validate
    :type acceptSessionKey: Bool
    :param acceptSessionKey: If True, we also accept the session's skey
    :returns: False if the key was not valid for whatever reasons,
        the data (given during createSecurityKey) as dictionary
        or True if the dict is empty.
    """
    if acceptSessionKey:
        if key == currentSession.getSessionSecurityKey():
            return (True)
    try:
        dbObj = db.Get(db.Key.from_path(securityKeyKindName, key))
    except:  # Malformed keys or datastore errors simply mean "not valid"
        return (False)
    if dbObj:
        if "session" in dbObj and dbObj["session"] is not None:
            # This key is bound to a specific session.
            if dbObj["session"] != currentSession.getSessionKey():
                return (False)
        db.Delete(dbObj.key())  # One-time key: consume it immediately
        if dbObj["until"] < datetime.now():  # This key has expired
            return (False)
        res = {}
        for k in dbObj.keys():
            res[k] = dbObj[k]
        # Strip internal bookkeeping fields. pop() with a default instead of
        # del, so a key that was stored without these fields cannot raise a
        # KeyError here (the original used unconditional `del`).
        res.pop("session", None)
        res.pop("until", None)
        if not res:
            return (True)
        return (res)
    return (False)
def getRootNode(self, subRepo):
    """
    Returns the root-root-node for a given (sub)-repo.

    :param subRepo: URL-safe root-node key.
    :type subRepo: str
    :returns: :class:`server.db.Entity`
    """
    repo = db.Get(subRepo)
    if "parentrepo" in repo:
        # This is a sub-repo: resolve its owning repository.
        return db.Get(repo["parentrepo"])
    if "rootNode" in repo and str(repo["rootNode"]) == "1":
        # Already a root node.
        return repo
    return None
def fromClient( self, data ):
    """
    Load supplied *data* into Skeleton.

    This function works similar to :func:`~server.skeleton.Skeleton.setValues`, except that
    the values retrieved from *data* are checked against the bones and their validity checks.

    Even if this function returns False, all bones are guaranteed to be in a valid state.
    The ones which have been read correctly are set to their valid values;
    Bones with invalid values are set back to a safe default (None in most cases).
    So it's possible to call :func:`~server.skeleton.Skeleton.toDB` afterwards even if reading
    data with this function failed (though this might violate the assumed consistency-model).

    :param data: Dictionary from which the data is read.
    :type data: dict

    :returns: True if all data was successfully read and taken by the Skeleton's bones.\
    False otherwise (eg. some required fields where missing or invalid).
    :rtype: bool
    """
    complete = True
    super(BaseSkeleton, self).__setattr__("errors", {})
    # First pass: let every writable bone parse its value from *data*.
    for key, _bone in self.items():
        if _bone.readOnly:
            continue
        error = _bone.fromClient( self.valuesCache, key, data )
        if isinstance( error, errors.ReadFromClientError ):
            self.errors.update( error.errors )
            if error.forceFail:
                complete = False
        else:
            self.errors[ key ] = error
        if error and _bone.required:
            complete = False
            logging.info("%s throws error: %s" % (key, error))
    # Second pass: verify unique-constraints against the per-bone
    # uniquePropertyIndex entities in the datastore.
    for boneName, boneInstance in self.items():
        if boneInstance.unique:
            newVal = boneInstance.getUniquePropertyIndexValue(self.valuesCache, boneName)
            if newVal is not None:
                try:
                    dbObj = db.Get(db.Key.from_path("%s_%s_uniquePropertyIndex" % (self.kindName, boneName), newVal))
                    if dbObj["references"] != self["key"]:  # This value is taken (sadly, not by us)
                        complete = False
                        if isinstance(boneInstance.unique, unicode):
                            errorMsg = _(boneInstance.unique)
                        else:
                            errorMsg = _("This value is not available")
                        self.errors[boneName] = errorMsg
                except db.EntityNotFoundError:
                    pass
    # An empty request (or one containing only "key" / the "nomissing" flag)
    # just probes the skeleton structure - suppress the errors collected above.
    if( len(data) == 0 or (len(data) == 1 and "key" in data) or ("nomissing" in data and str(data["nomissing"]) == "1" )):
        super(BaseSkeleton, self).__setattr__( "errors", {} )
    return( complete )
def getRootNode(self, entryKey):
    """
    Returns the root-node for a given child.

    :param entryKey: URL-Safe key of the child entry
    :type entryKey: str
    :returns: The entity of the root-node.
    :rtype: :class:`server.db.Entity`
    """
    node = db.Get(entryKey)
    # Climb the parent chain until no further parent exists.
    while node and "parententry" in node:
        node = db.Get(node["parententry"])
    assert node and node.key().kind() == self.viewSkel().kindName + "_rootNode"
    return node
def txn(orderKey, state, removeState):
    """Sets or clears the flag state_<state> on the order and bumps changedate."""
    orderObj = db.Get(db.Key(orderKey))
    if not orderObj:
        return
    orderObj["state_%s" % state] = "0" if removeState else "1"
    orderObj["changedate"] = datetime.now()
    db.Put(orderObj)
def getOldBlobKeysTxn(dbKey):
    """Pops and returns the stale blob references of the given lock object."""
    lockObj = db.Get(dbKey)
    staleRefs = lockObj["old_blob_references"] or []
    if lockObj["is_stale"]:
        # The whole lock object is obsolete - drop it entirely.
        db.Delete(dbKey)
    else:
        # Keep the lock, but clear the references we just collected.
        lockObj["has_old_blob_references"] = False
        lockObj["old_blob_references"] = []
        db.Put(lockObj)
    return staleRefs
def txnDelete(key, skel):
    """
    Transactionally deletes the entity *key* together with its per-bone
    unique-value lock objects and marks its blob-reference lock as stale
    so the cleanup task can release the referenced blobs later.

    :param key: Urlsafe key of the entity to delete.
    :param skel: Skeleton instance describing the entity's bones.
    """
    dbObj = db.Get(db.Key(key))  # Fetch the raw object as we might have to clear locks
    for boneName, bone in skel.items():
        # Ensure that we delete any value-lock objects remaining for this entry.
        # (Removed leftover debug logging.error("x1"/"x2"/keys) calls here.)
        if bone.unique:
            try:
                if "%s.uniqueIndexValue" % boneName in dbObj.keys():
                    db.Delete(db.Key.from_path(
                        "%s_%s_uniquePropertyIndex" % (skel.kindName, boneName),
                        dbObj["%s.uniqueIndexValue" % boneName]))
            except db.EntityNotFoundError:
                # NOTE(review): the original re-raised here (with a dead
                # `pass` after it); re-raise kept to preserve behavior,
                # though silently ignoring a missing lock may be the intent.
                raise
    # Delete the blob-key lock object
    try:
        lockObj = db.Get(db.Key.from_path("viur-blob-locks", str(key)))
    except:
        lockObj = None
    if lockObj is not None:
        if lockObj["old_blob_references"] is None and lockObj["active_blob_references"] is None:
            db.Delete(lockObj)  # Nothing to do here
        else:
            if lockObj["old_blob_references"] is None:
                # No old stale entries, move active_blob_references -> old_blob_references
                lockObj["old_blob_references"] = lockObj["active_blob_references"]
            elif lockObj["active_blob_references"] is not None:
                # Append the current references to the list of old & stale references
                lockObj["old_blob_references"] += lockObj["active_blob_references"]
            lockObj["active_blob_references"] = []  # There are no active ones left
            lockObj["is_stale"] = True
            lockObj["has_old_blob_references"] = True
            db.Put(lockObj)
    db.Delete(db.Key(key))
def getSofortURL(self, orderID):
    """
    Builds the signed Sofort payment start-URL for the given order.

    :param orderID: Urlsafe key of the order to pay.
    :returns: Absolute URL the customer should be redirected to.
    """
    order = db.Get(db.Key(orderID))
    hashstr = "%s|%s|||||%.2f|EUR|%s||%s||||||%s" % (
        conf["sofort"]["userid"], conf["sofort"]["projectid"],
        float(order["price"]), str(order.key()), str(order.key()),
        conf["sofort"]["projectpassword"])
    hash = hashlib.sha512(hashstr.encode("UTF-8")).hexdigest()
    # Bugfix: the query string contained "¤cy_id" - the "&curren" prefix of
    # "&currency_id" had been mangled into the HTML entity for the currency
    # sign. Restored the literal "&currency_id=EUR" parameter.
    returnURL = "https://www.sofortueberweisung.de/payment/start?user_id=%s&project_id=%s&amount=%.2f&currency_id=EUR&reason_1=%s&user_variable_0=%s&hash=%s" % (
        conf["sofort"]["userid"], conf["sofort"]["projectid"],
        float(order["price"]), str(order.key()), str(order.key()), hash)
    return (returnURL)
def index(self, *args, **kwargs):
    """
    Maintenance entry point: runs the database update-check, then all due
    periodic tasks, then drains the queue of one-shot tasks.
    """
    global _callableTasks, _periodicTasks
    logging.debug("Starting maintenance-run")
    checkUpdate()  # Let the update-module verify the database layout first
    logging.debug("Updatecheck complete")
    for task, intervall in _periodicTasks.items():  # Call all periodic tasks
        if intervall:  # Ensure this task doesn't get called too often
            try:
                lastCall = db.Get(db.Key.from_path("viur-task-interval", task.periodicTaskName))
                if lastCall["date"] > datetime.now() - timedelta(minutes=intervall):
                    logging.debug("Skipping task %s - Has already run recently." % task.periodicTaskName)
                    continue
            except db.EntityNotFoundError:
                # Never ran before - proceed.
                pass
        res = self.findBoundTask(task)
        if res:  # It's bound, call it this way :)
            res[0]()
        else:
            task()  # It seems it wasn't bound - call it as a static method
        logging.debug("Successfully called task %s" % task.periodicTaskName)
        if intervall:
            # Update its last-call timestamp
            entry = db.Entity("viur-task-interval", name=task.periodicTaskName)
            entry["date"] = datetime.now()
            db.Put(entry)
    logging.debug("Periodic tasks complete")
    for currentTask in db.Query("viur-queued-tasks").iter():  # Look for queued tasks
        # Delete first so a crashing task is not retried forever.
        db.Delete(currentTask.key())
        if currentTask["taskid"] in _callableTasks:
            task = _callableTasks[currentTask["taskid"]]()
            tmpDict = {}
            for k in currentTask.keys():
                if k == "taskid":
                    continue
                tmpDict[k] = json.loads(currentTask[k])
            try:
                task.execute(**tmpDict)
            except Exception as e:
                logging.error("Error executing Task")
                logging.exception(e)
    logging.debug("Scheduled tasks complete")
def startProcessing(self, userKey):
    """
    Starts the time-based OTP second-factor flow for *userKey* if that user
    has OTP configured (otpid + otpkey present and non-empty).

    :returns: The render result announcing the second factor, or None if
        the user has no OTP configured.
    """
    user = db.Get(userKey)
    if not all([(x in user and user[x]) for x in ["otpid", "otpkey"]]):
        return None
    logging.info("OTP wanted for user")
    # Stash everything the OTP verification step will need in the session.
    session.current["_otp_user"] = {
        "uid": str(userKey),
        "otpid": user["otpid"],
        "otpkey": user["otpkey"],
        "otptimedrift": user["otptimedrift"],
        "timestamp": time(),
        "failures": 0,
    }
    session.current.markChanged()
    return self.userModule.render.loginSucceeded(msg="X-VIUR-2FACTOR-TimeBasedOTP")
def hasblob(self, blobkey, key):
    """
    Export-API helper: checks whether the blob *blobkey* from the old
    application is known to this instance via the viur-blobimportmap.

    :returns: JSON-encoded boolean (or the stored "available" flag).
    :raises: :exc:`errors.Forbidden` if the access key is invalid.
    """
    if not self._checkKey(key, export=False):
        raise errors.Forbidden()
    try:
        # NOTE(review): hexdigest() already returns hex; the extra
        # .encode("hex") double-encodes. Kept as-is because the import map
        # entries were written with this same scheme elsewhere in the file.
        oldKeyHash = sha256(blobkey).hexdigest().encode("hex")
        res = db.Get(db.Key.from_path("viur-blobimportmap", oldKeyHash))
        if res:
            if "available" in res.keys():
                return json.dumps(res["available"])
            else:
                return json.dumps(True)
    except:
        # Any lookup failure is treated as "blob not available".
        pass
    return json.dumps(False)
def fromDB(self, *args, **kwargs):
    """
    Loads this leaf skeleton and transparently heals entries that lack a
    "parentrepo" value by walking up their parentdir-chain and persisting
    the discovered repository key.
    """
    res = super(TreeLeafSkel, self).fromDB(*args, **kwargs)
    if not res or self["parentrepo"]:
        # Load failed, or nothing to heal.
        return res
    # Heal missing parent-repo values
    try:
        dbObj = db.Get(self["key"])
    except:
        return res
    if "parentdir" not in dbObj:  # RootNode
        return res
    while "parentdir" in dbObj and dbObj["parentdir"]:
        try:
            dbObj = db.Get(dbObj["parentdir"])
        except:
            return res
    self["parentrepo"] = str(dbObj.key())
    self.toDB()
    return res
def startProcessing(self, step, orderID):
    """
    Initiates a PayPal express-checkout for the given order, stores the
    received token on the order and redirects the customer to PayPal.

    :param step: Current step within the ordering process.
    :param orderID: Urlsafe key of the order to pay.
    :raises: :exc:`errors.Redirect` to the PayPal payment page on success.
    """

    def setTokenTxn(key, token):
        """Persists the received PayPal token on the order."""
        orderObj = db.Get(key)
        if not orderObj:
            return
        orderObj["paypal_token"] = urllib.unquote(token)
        db.Put(orderObj)

    paypal = PayPal.PayPalHandler()
    key = db.Key(orderID)
    order = db.Get(key)
    if not order:
        return
    token = paypal.SetExpressCheckout("%.2f" % order["price"])
    db.RunInTransaction(setTokenTxn, key, token)
    raise (errors.Redirect(paypal.getPayURL(token)))
def relSkelFromKey(key):
    """
    Fetches the entity behind *key* and wraps it in a RefSkel.

    :returns: The populated RefSkel, or None if the key has the wrong
        kind or the entity does not exist.
    """
    if not isinstance(key, db.Key):
        key = db.Key(encoded=key)
    if key.kind() != self.kind:
        logging.error(
            "I got a key, which kind doesn't match my type! (Got: %s, my type %s)"
            % (key.kind(), self.kind))
        return None
    entity = db.Get(key)
    if not entity:
        logging.error("Key %s not found" % str(key))
        return None
    relSkel = RefSkel.fromSkel(skeletonByKind(self.kind), *self.refKeys)
    relSkel.unserialize(entity)
    return relSkel
def calculateOrderSum(self, step, orderKey, *args, **kwargs):
    """
    Calculates the final price for this order. This function *must* be
    called before any attempt is made to start a payment process.

    :param step: Current step within the ordering process
    :type step: int
    :param orderKey: order to calculate the price for
    :type orderKey: str
    """
    # Sum the per-item totals (4th column of each bill item).
    price = sum(billItem[3] for billItem in self.getBillItems(orderKey))
    orderObj = db.Get(db.Key(str(orderKey)))
    orderObj["price"] = price
    db.Put(orderObj)
def updateInplace(relDict):
    """
    Fetches the entity referenced by valDict["dest.key"] and updates all
    dest.* keys accordingly.
    """
    if isinstance(relDict, dict) and "dest" in relDict.keys():
        valDict = relDict["dest"]
    else:
        logging.error("Invalid dictionary in updateInplace: %s" % relDict)
        return
    if "key" in valDict.keys() and valDict["key"]:
        originalKey = valDict["key"]
    else:
        logging.error("Invalid dictionary in updateInplace: %s" % valDict)
        return
    # Normalize the key to the current application-id.
    entityKey = normalizeKey(originalKey)
    if originalKey != entityKey:
        logging.info("Rewriting %s to %s" % (originalKey, entityKey))
        valDict["key"] = entityKey
    # Try to update referenced values;
    # If the entity does not exist with this key, ignore
    # (key was overridden above to have a new appid when transferred).
    newValues = None
    try:
        newValues = db.Get(entityKey)
        assert newValues is not None
    except db.EntityNotFoundError:
        # This entity has been deleted
        logging.info("The key %s does not exist" % entityKey)
    except:
        raise
    if newValues:
        for key in valDict.keys():
            if key == "key":
                continue
            elif key in newValues.keys():
                # NOTE(review): valDict is treated as a plain dict above, yet
                # getattr(valDict, key) fetches an *attribute* here and
                # valDict.valuesCache is accessed - verify valDict is really
                # a skeleton-like object at this point, not a dict.
                getattr(valDict, key).unserialize(valDict.valuesCache, key, newValues)
def updateInplace(relDict):
    """
    Refreshes a fileBone value dict in place: normalizes its entity key to
    the current application-id and, if the file was imported from another
    app, rewrites dlkey/servingurl via the viur-blobimportmap; otherwise
    re-requests a fresh serving-URL.
    """
    if isinstance(relDict, dict) and "dest" in relDict:
        valDict = relDict["dest"]
    else:
        logging.error("Invalid dictionary in updateInplace: %s" % relDict)
        return
    if "key" in valDict:
        originalKey = valDict["key"]
    else:
        logging.error("Broken fileBone dict")
        return
    entityKey = normalizeKey(originalKey)
    if originalKey != entityKey:
        logging.info("Rewriting %s to %s" % (originalKey, entityKey))
        # Bugfix: store the *normalized* key. The original wrote
        # `originalKey` back, leaving the value effectively unchanged
        # (compare the sibling relationalBone updateInplace, which
        # assigns entityKey).
        valDict["key"] = entityKey
    # Anyway, try to copy a dlkey and servingurl
    # from the corresponding viur-blobimportmap entity.
    if "dlkey" in valDict:
        try:
            oldKeyHash = sha256(valDict["dlkey"]).hexdigest().encode("hex")
            logging.info("Trying to fetch entry from blobimportmap with hash %s" % oldKeyHash)
            res = db.Get(db.Key.from_path("viur-blobimportmap", oldKeyHash))
        except:
            res = None
        if res and res["oldkey"] == valDict["dlkey"]:
            valDict["dlkey"] = res["newkey"]
            valDict["servingurl"] = res["servingurl"]
            logging.info("Refreshing file dlkey %s (%s)" %
                         (valDict["dlkey"], valDict["servingurl"]))
        else:
            if valDict["servingurl"]:
                try:
                    valDict["servingurl"] = images.get_serving_url(valDict["dlkey"])
                except Exception as e:
                    logging.exception(e)
def wrapF(self, *args, **kwargs):
    """
    Caching wrapper around the decorated request handler *f*: serves a
    stored response from the datastore cache when possible, otherwise
    calls *f* and stores its result for later requests.
    """
    currentRequest = request.current.get()
    if conf["viur.disableCache"] or currentRequest.disableCache:
        # Caching disabled
        if conf["viur.disableCache"]:
            logging.debug("Caching is disabled by config")
        return (f(self, *args, **kwargs))
    # How many arguments are part of the way to the function called (and how many are just *args)
    offset = -len(currentRequest.args) or len(currentRequest.pathlist)
    path = "/" + "/".join(currentRequest.pathlist[:offset])
    if not path in urls:
        # This path (possibly a sub-render) should not be cached
        logging.debug("Not caching for %s" % path)
        return (f(self, *args, **kwargs))
    key = keyFromArgs(f, userSensitive, languageSensitive, evaluatedArgs, path, args, kwargs)
    if not key:
        # Something is wrong (possibly the parameter-count).
        # Let's call f, but we know already that this will clash.
        return (f(self, *args, **kwargs))
    try:
        dbRes = db.Get(db.Key.from_path(viurCacheName, key))
    except db.EntityNotFoundError:
        dbRes = None
    if dbRes:
        if not maxCacheTime or \
                dbRes["creationtime"] > datetime.now() - timedelta(seconds=maxCacheTime):
            # We store it unlimited or the cache is fresh enough
            logging.debug("This request was served from cache.")
            currentRequest.response.headers['Content-Type'] = dbRes["content-type"].encode("UTF-8")
            return (dbRes["data"])
    # If we made it this far, the request wasn't cached or too old; we need to rebuild it
    res = f(self, *args, **kwargs)
    dbEntity = db.Entity(viurCacheName, name=key)
    dbEntity["data"] = res
    dbEntity["creationtime"] = datetime.now()
    dbEntity["path"] = path
    dbEntity["content-type"] = request.current.get().response.headers['Content-Type']
    dbEntity.set_unindexed_properties(["data", "content-type"])  # We can save 2 DB-Writes :)
    db.Put(dbEntity)
    logging.debug("This request was a cache-miss. Cache has been updated.")
    return (res)
def refresh(self):
    """
    Refreshes this file entry: rewrites dlkey/servingurl from the
    viur-blobimportmap if an import entry exists, otherwise re-requests a
    fresh serving-URL, then delegates to the parent implementation.
    """
    # Update from blobimportmap
    res = None
    try:
        oldKeyHash = sha256(self["dlkey"]).hexdigest().encode("hex")
        res = db.Get(db.Key.from_path("viur-blobimportmap", oldKeyHash))
    except:
        res = None
    if res and res["oldkey"] == self["dlkey"]:
        self["dlkey"] = res["newkey"]
        self["servingurl"] = res["servingurl"]
        logging.info("Refreshing file dlkey %s (%s)" %
                     (self["dlkey"], self["servingurl"]))
    elif self["servingurl"]:
        try:
            self["servingurl"] = images.get_serving_url(self["dlkey"])
        except Exception as e:
            logging.exception(e)
    super(fileBaseSkel, self).refresh()
def fromDB(self, key):
    """
    Load entity with *key* from the data store into the Skeleton.

    Reads all available data of entity kind *kindName* and the key *key*
    from the data store into the Skeleton structure's bones. Any previous
    data of the bones will discard.

    To store a Skeleton object to the data store, see
    :func:`~server.skeleton.Skeleton.toDB`.

    :param key: A :class:`server.DB.Key`, :class:`server.DB.Query`, or string,\
    from which the data shall be fetched.
    :type key: server.DB.Key | DB.Query | str

    :returns: True on success; False if the given key could not be found.
    :rtype: bool
    """
    if isinstance(key, basestring):
        # Accept urlsafe-encoded keys, numeric ids and plain key-names alike.
        try:
            key = db.Key(key)
        except db.BadKeyError:
            # Not an encoded key: treat as id (digits) or name.
            key = unicode(key)
            if key.isdigit():
                key = long(key)
            elif not len(key):
                raise ValueError("fromDB called with empty key!")
            key = db.Key.from_path(self.kindName, key)
    if not isinstance(key, db.Key):
        raise ValueError(
            "fromDB expects an db.Key instance, an string-encoded key or a long as argument, got \"%s\" instead"
            % key)
    if key.kind() != self.kindName:  # Wrong Kind
        return (False)
    try:
        dbRes = db.Get(key)
    except db.EntityNotFoundError:
        return (False)
    if dbRes is None:
        return (False)
    self.setValues(dbRes)
    # Store the entity's actual (urlsafe) key back into the skeleton.
    key = str(dbRes.key())
    self["key"] = key
    return (True)
def getOrBuildIndex(self, origQuery):
    """
    Builds a specific index based on origQuery AND local variables
    (self.indexPage and self.indexMaxPage).
    Returns a list of starting-cursors for each page. You probably
    shouldn't call this directly. Use cursorForQuery.

    :param origQuery: Query to build the index for
    :type origQuery: db.Query
    :returns: [] list of page start-cursors (None for the first page)
    """
    key = self.keyFromQuery(origQuery)
    if key in self._cache.keys():  # We have it cached
        return (self._cache[key])
    # We don't have it cached - try to load it from DB
    try:
        index = db.Get(db.Key.from_path(self._dbType, key))
        res = json.loads(index["data"])
        self._cache[key] = res
        return (res)
    except db.EntityNotFoundError:  # It's not in the datastore, too
        pass
    # We don't have this index yet.. Build it
    # Clone the original Query (keys-only: we only need cursor positions)
    queryRes = origQuery.clone(keysOnly=True).datastoreQuery.Run(
        limit=self.maxPages * self.pageSize)
    # Build-Up the index
    res = []
    i = 0
    previousCursor = None  # The first page doesn't have any cursor
    for discardedKey in queryRes:
        if i % self.pageSize == 0:
            # Page boundary: record the cursor captured at the end of the
            # previous page.
            res.append(previousCursor)
        if i % self.pageSize == (self.pageSize - 1) and i > 1:
            previousCursor = str(queryRes.cursor().urlsafe())
        i += 1
    if len(res) == 0:  # Ensure that the first page exists
        res.append(None)
    # Persist the freshly built index for later requests.
    entry = db.Entity(self._dbType, name=key)
    entry["data"] = json.dumps(res)
    entry["creationdate"] = datetime.now()
    db.Put(entry)
    return (res)
def load(self, req):
    """
    Initializes the Session. If the client supplied a valid Cookie, the
    session is read from the memcache/datastore, otherwise a new, empty
    session will be initialized.
    """
    self.changed = False
    self.key = None
    self.sslKey = None
    self.sessionSecurityKey = None
    self.session = {}
    if self.plainCookieName in req.request.cookies:
        cookie = req.request.cookies[self.plainCookieName]
        try:
            data = db.Get(db.Key.from_path(self.kindName, cookie))
        except:
            # Malformed cookie or datastore error: treat as "no session".
            return (False)
        if data:  # Loaded successfully from Memcache
            if data["lastseen"] < time() - conf["viur.session.lifeTime"]:
                # This session is too old
                return (False)
            # NOTE: session payload is server-written pickle data.
            self.session = pickle.loads(base64.b64decode(data["data"]))
            self.sslKey = data["sslkey"]
            if "skey" in data:
                self.sessionSecurityKey = data["skey"]
            else:
                self.reset()
            if data["lastseen"] < time() - 5 * 60:  # Refresh every 5 Minutes
                self.changed = True
        # On SSL connections the secure cookie must match the stored sslKey,
        # otherwise the session is considered hijacked and dropped.
        if req.isSSLConnection and not (self.sslCookieName in req.request.cookies
                                        and req.request.cookies[self.sslCookieName] == self.sslKey
                                        and self.sslKey):
            if self.sslKey:
                logging.warning("Possible session hijack attempt! Session dropped.")
            self.reset()
            return (False)
        self.key = str(cookie)
        return (True)
def authenticateUser(self, userKey, **kwargs):
    """
    Performs Log-In for the current session and the given userKey.

    This resets the current session: All fields not explicitly marked as
    persistent by conf["viur.session.persistentFieldsOnLogin"] are gone
    afterwards.

    :param userKey: The (DB-)Key of the user we shall authenticate
    :type userKey: db.Key
    """
    res = db.Get(userKey)
    assert res, "Unable to authenticate unknown user %s" % userKey
    oldSession = {k: v for k, v in session.current.items()}  # Store all items in the current session
    # Reset the session to prevent session-fixation attacks, then restore
    # only the whitelisted fields.
    session.current.reset()
    # Copy the persistent fields over
    for k in conf["viur.session.persistentFieldsOnLogin"]:
        if k in oldSession.keys():
            session.current[k] = oldSession[k]
    del oldSession
    session.current["user"] = {}
    for key in ["name", "status", "access"]:
        try:
            session.current["user"][key] = res[key]
        except:
            # Field missing on the user entity - skip it.
            pass
    session.current["user"]["key"] = str(res.key())
    if not "access" in session.current["user"].keys() or not session.current["user"]["access"]:
        # Normalize a missing/empty access list to [].
        session.current["user"]["access"] = []
    session.current.markChanged()
    self.onLogin()
    return self.render.loginSucceeded(**kwargs)