def serialize(self, skel: 'SkeletonInstance', name: str, parentIndexed: bool) -> bool:
    """
        Serializes this bone into something we can write into the datastore.

        :param name: The property-name this bone has in its Skeleton (not the description!)
        :returns: True if the bone has been written into the entity, False otherwise
    """
    if name in skel.accessedValues:
        newVal = skel.accessedValues[name]
        if self.languages and self.multiple:
            res = db.Entity()
            res["_viurLanguageWrapper_"] = True
            for language in self.languages:
                res[language] = []
                if not self.indexed:
                    res.exclude_from_indexes.add(language)
                if language in newVal:
                    for singleValue in newVal[language]:
                        res[language].append(self.singleValueSerialize(singleValue, skel, name, parentIndexed))
        elif self.languages:
            res = db.Entity()
            res["_viurLanguageWrapper_"] = True
            for language in self.languages:
                res[language] = None
                if not self.indexed:
                    res.exclude_from_indexes.add(language)
                if language in newVal:
                    res[language] = self.singleValueSerialize(newVal[language], skel, name, parentIndexed)
        elif self.multiple:
            res = []
            for singleValue in newVal:
                res.append(self.singleValueSerialize(singleValue, skel, name, parentIndexed))
        else:  # No Languages, not Multiple
            res = self.singleValueSerialize(newVal, skel, name, parentIndexed)
        skel.dbEntity[name] = res
        # Ensure our indexed flag is up2date
        indexed = self.indexed and parentIndexed
        if indexed and name in skel.dbEntity.exclude_from_indexes:
            skel.dbEntity.exclude_from_indexes.discard(name)
        elif not indexed and name not in skel.dbEntity.exclude_from_indexes:
            skel.dbEntity.exclude_from_indexes.add(name)
        return True
    return False
def importBlobFromViur2(dlKey, fileName):
    if not conf.get("viur.viur2import.blobsource"):
        return False
    existingImport = db.Get(db.Key("viur-viur2-blobimport", dlKey))
    if existingImport:
        if existingImport["success"]:
            return existingImport["dlurl"]
        return False
    if conf["viur.viur2import.blobsource"]["infoURL"]:
        try:
            importDataReq = urlopen(conf["viur.viur2import.blobsource"]["infoURL"] + dlKey)
        except:
            marker = db.Entity(db.Key("viur-viur2-blobimport", dlKey))
            marker["success"] = False
            marker["error"] = "Failed URL-FETCH 1"
            db.Put(marker)
            return False
        if importDataReq.status != 200:
            marker = db.Entity(db.Key("viur-viur2-blobimport", dlKey))
            marker["success"] = False
            marker["error"] = "Failed URL-FETCH 2"
            db.Put(marker)
            return False
        importData = json.loads(importDataReq.read())
        oldBlobName = conf["viur.viur2import.blobsource"]["gsdir"] + "/" + importData["key"]
        srcBlob = storage.Blob(bucket=bucket,
                               name=conf["viur.viur2import.blobsource"]["gsdir"] + "/" + importData["key"])
    else:
        oldBlobName = conf["viur.viur2import.blobsource"]["gsdir"] + "/" + dlKey
        srcBlob = storage.Blob(bucket=bucket,
                               name=conf["viur.viur2import.blobsource"]["gsdir"] + "/" + dlKey)
    if not srcBlob.exists():
        marker = db.Entity(db.Key("viur-viur2-blobimport", dlKey))
        marker["success"] = False
        marker["error"] = "Local SRC-Blob missing"
        marker["oldBlobName"] = oldBlobName
        db.Put(marker)
        return False
    bucket.rename_blob(srcBlob, "%s/source/%s" % (dlKey, fileName))
    marker = db.Entity(db.Key("viur-viur2-blobimport", dlKey))
    marker["success"] = True
    marker["old_src_key"] = dlKey
    marker["old_src_name"] = fileName
    marker["dlurl"] = utils.downloadUrlFor(dlKey, fileName, False, None)
    db.Put(marker)
    return marker["dlurl"]
def storeEntry2(self, e, key):
    if not self._checkKey(key, export=False):
        raise errors.Forbidden()
    entry = pickle.loads(e.decode("HEX"))
    if "key" not in entry and "id" in entry:
        entry["key"] = entry["id"]
    for k in list(entry.keys()):
        if isinstance(entry[k], str):
            entry[k] = entry[k].decode("UTF-8")
    key = db.Key(encoded=utils.normalizeKey(entry["key"]))
    dbEntry = db.Entity(kind=key.kind(), parent=key.parent(), id=key.id(), name=key.name())
    # maybe some more fixes here ?
    for k in entry.keys():
        if k != "key":
            val = entry[k]
            dbEntry[k] = val
    db.Put(dbEntry)
    if dbEntry.key().id():
        # Ensure the Datastore knows that its id is in use
        datastore._GetConnection()._reserve_keys([dbEntry.key()])
    try:
        skel = skeletonByKind(key.kind())()
    except:
        logging.error("Unknown Skeleton - skipping")
        return
    skel.fromDB(str(dbEntry.key()))
    skel.refresh()
    skel.toDB(clearUpdateTag=True)
def upload(self, oldkey, *args, **kwargs):
    res = []
    for upload in self.getUploads():
        fileName = decodeFileName(upload.filename)
        if str(upload.content_type).startswith("image/"):
            try:
                servingURL = get_serving_url(upload.key())
            except:
                servingURL = ""
        else:
            servingURL = ""
        res.append({"name": fileName,
                    "size": upload.size,
                    "mimetype": upload.content_type,
                    "dlkey": str(upload.key()),
                    "servingurl": servingURL,
                    "parentdir": "",
                    "parentrepo": "",
                    "weak": False})
        oldkey = decodeFileName(oldkey)
        oldKeyHash = sha256(oldkey).hexdigest().encode("hex")
        e = db.Entity("viur-blobimportmap", name=oldKeyHash)
        e["newkey"] = str(upload.key())
        e["oldkey"] = oldkey
        e["servingurl"] = servingURL
        e["available"] = True
        db.Put(e)
    return json.dumps({"action": "addSuccess", "values": res})
def doCheckForUnreferencedBlobs(cursor=None):
    def getOldBlobKeysTxn(dbKey):
        obj = db.Get(dbKey)
        res = obj["old_blob_references"] or []
        if obj["is_stale"]:
            db.Delete(dbKey)
        else:
            obj["has_old_blob_references"] = False
            obj["old_blob_references"] = []
            db.Put(obj)
        return res

    query = db.Query("viur-blob-locks").filter("has_old_blob_references", True).setCursor(cursor)
    for lockObj in query.run(100):
        oldBlobKeys = db.RunInTransaction(getOldBlobKeysTxn, lockObj.key)
        for blobKey in oldBlobKeys:
            if db.Query("viur-blob-locks").filter("active_blob_references =", blobKey).getEntry():
                # This blob is referenced elsewhere
                logging.info("Stale blob is still referenced, %s" % blobKey)
                continue
            # Add a marker and schedule it for deletion
            fileObj = db.Query("viur-deleted-files").filter("dlkey", blobKey).getEntry()
            if fileObj:  # Its already marked
                logging.info("Stale blob already marked for deletion, %s" % blobKey)
                return
            fileObj = db.Entity(db.Key("viur-deleted-files"))
            fileObj["itercount"] = 0
            fileObj["dlkey"] = str(blobKey)
            logging.info("Stale blob marked dirty, %s" % blobKey)
            db.Put(fileObj)
    newCursor = query.getCursor()
    if newCursor:
        doCheckForUnreferencedBlobs(newCursor)
def singleValueUnserialize(self, val, skel: 'viur.core.skeleton.SkeletonInstance', name: str):
    """
        Restores one of our values (including the Rel- and Using-Skel) from the serialized data read from the datastore.

        :param val: JSON-encoded datastore property
        :return: Our value (with restored RelSkel and using-Skel)
    """
    if isinstance(val, str):
        try:
            value = extjson.loads(val)
        except:
            value = None
    else:
        value = val
    if not value:
        return None
    elif isinstance(value, list) and value:
        value = value[0]
    assert isinstance(value, dict), "Read something from the datastore that's not a dict: %s" % str(type(value))
    if "dest" not in value:
        return None
    relSkel, usingSkel = self._getSkels()
    relSkel.unserialize(value["dest"])
    if self.using is not None:
        usingSkel.unserialize(value["rel"] or db.Entity())
        usingData = usingSkel
    else:
        usingData = None
    return {"dest": relSkel, "rel": usingData}
def jsonDecodeObjectHook(obj):
    """
        Inverse to JsonKeyEncoder: Check if the object matches a custom ViUR type and recreate it accordingly.
    """
    if len(obj) == 1:
        if ".__key__" in obj:
            return db.KeyClass.from_legacy_urlsafe(obj[".__key__"])
        elif ".__datetime__" in obj:
            value = datetime.strptime(obj[".__datetime__"], "%d.%m.%Y %H:%M:%S")
            return datetime(value.year, value.month, value.day, value.hour, value.minute, value.second,
                            tzinfo=pytz.UTC)
        elif ".__bytes__" in obj:
            return base64.b64decode(obj[".__bytes__"])
    elif len(obj) == 2 and ".__entity__" in obj and ".__ekey__" in obj:
        r = db.Entity(db.KeyClass.from_legacy_urlsafe(obj[".__ekey__"]) if obj[".__ekey__"] else None)
        r.update(obj[".__entity__"])
        return r
    return obj
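# A minimal usage sketch (assumption: the payload was produced by the matching JsonKeyEncoder).
# Passing the hook to json.loads() lets every decoded dict run through it, so the wrapped
# ViUR types are restored transparently.
import json

payload = '{"created": {".__datetime__": "24.12.2021 18:30:00"}}'
restored = json.loads(payload, object_hook=jsonDecodeObjectHook)
# restored["created"] is now a timezone-aware datetime (UTC)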
def updateTxn(cacheKey):
    key = db.Key(self.rateLimitKind, cacheKey)
    obj = db.Get(key)
    if obj is None:
        obj = db.Entity(key)
        obj["value"] = 0
    obj["value"] += 1
    obj["expires"] = utils.utcNow() + timedelta(minutes=2 * self.minutes)
    db.Put(obj)
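# A minimal sketch of how this transaction function is typically driven (the key layout below
# is hypothetical; updateTxn is assumed to be defined inside a rate-limit method). Running it
# through db.RunInTransaction keeps the read-increment-write of "value" atomic.
cacheKey = "%s-%s" % (self.resource, remoteIP)  # hypothetical: resource name plus client address
db.RunInTransaction(updateTxn, cacheKey)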
def save(self, req):
    """
        Writes the session to the memcache/datastore.

        Does nothing, if the session hasn't been changed in the current request.
    """
    try:
        if self.changed or self.isInitial:
            serialized = base64.b64encode(pickle.dumps(self.session, protocol=pickle.HIGHEST_PROTOCOL))
            # Get the current user id
            try:
                # Check for our custom user-api
                userid = conf["viur.mainApp"].user.getCurrentUser()["key"]
            except:
                userid = None
            if self.isInitial and not req.isSSLConnection:
                # Reset the Secure only key to None - we can't set it anyway.
                self.sslKey = None
            try:
                dbSession = db.Entity(db.Key(self.kindName, self.httpKey))
                dbSession["data"] = serialized
                dbSession["sslkey"] = self.sslKey
                dbSession["staticSecurityKey"] = self.staticSecurityKey
                dbSession["securityKey"] = self.securityKey
                dbSession["lastseen"] = time()
                # Store the userid inside the sessionobj, so we can kill specific sessions if needed
                dbSession["user"] = str(userid) or "guest"
                db.Put(dbSession)
            except Exception as e:
                logging.exception(e)
                raise  # FIXME
            if self.sameSite:
                sameSite = "; SameSite=%s" % self.sameSite
            else:
                sameSite = ""
            req.response.headerlist.append(
                ("Set-Cookie", "%s=%s; Max-Age=99999; Path=/; HttpOnly%s" % (self.plainCookieName, self.httpKey, sameSite)))
            if req.isSSLConnection:
                req.response.headerlist.append(
                    ("Set-Cookie", "%s=%s; Max-Age=99999; Path=/; Secure; HttpOnly%s" % (self.sslCookieName, self.sslKey, sameSite)))
    except Exception as e:
        logging.exception(e)
        raise  # FIXME
def wrapF(self, *args, **kwargs) -> Union[str, bytes]:
    currReq = currentRequest.get()
    if conf["viur.disableCache"] or currReq.disableCache:
        # Caching disabled
        if conf["viur.disableCache"]:
            logging.debug("Caching is disabled by config")
        return f(self, *args, **kwargs)
    # How many arguments are part of the way to the function called (and how many are just *args)
    offset = -len(currReq.args) or len(currReq.pathlist)
    path = "/" + "/".join(currReq.pathlist[:offset])
    if path not in urls:
        # This path (possibly a sub-render) should not be cached
        logging.debug("Not caching for %s" % path)
        return f(self, *args, **kwargs)
    key = keyFromArgs(f, userSensitive, languageSensitive, evaluatedArgs, path, args, kwargs)
    if not key:
        # Something is wrong (possibly the parameter-count)
        # Let's call f, but we know already that this will clash
        return f(self, *args, **kwargs)
    dbRes = db.Get(db.Key(viurCacheName, key))
    if dbRes is not None:
        if not maxCacheTime or dbRes["creationtime"] > utils.utcNow() - timedelta(seconds=maxCacheTime):
            # We store it unlimited or the cache is fresh enough
            logging.debug("This request was served from cache.")
            currReq.response.headers['Content-Type'] = dbRes["content-type"]
            return dbRes["data"]
    # If we made it this far, the request wasn't cached or was too old; we need to rebuild it
    oldAccessLog = db.startDataAccessLog()
    try:
        res = f(self, *args, **kwargs)
    finally:
        accessedEntries = db.endDataAccessLog(oldAccessLog)
    dbEntity = db.Entity(db.Key(viurCacheName, key))
    dbEntity["data"] = res
    dbEntity["creationtime"] = utils.utcNow()
    dbEntity["path"] = path
    dbEntity["content-type"] = currReq.response.headers['Content-Type']
    dbEntity["accessedEntries"] = list(accessedEntries)
    dbEntity.exclude_from_indexes = ["data", "content-type"]  # We can save 2 DB-writes :)
    db.Put(dbEntity)
    logging.debug("This request was a cache-miss. Cache has been updated.")
    return res
def wrapF(self, *args, **kwargs):
    currentRequest = request.current.get()
    if conf["viur.disableCache"] or currentRequest.disableCache:
        # Caching disabled
        if conf["viur.disableCache"]:
            logging.debug("Caching is disabled by config")
        return f(self, *args, **kwargs)
    # How many arguments are part of the way to the function called (and how many are just *args)
    offset = -len(currentRequest.args) or len(currentRequest.pathlist)
    path = "/" + "/".join(currentRequest.pathlist[:offset])
    if path not in urls:
        # This path (possibly a sub-render) should not be cached
        logging.debug("Not caching for %s" % path)
        return f(self, *args, **kwargs)
    key = keyFromArgs(f, userSensitive, languageSensitive, evaluatedArgs, path, args, kwargs)
    if not key:
        # Something is wrong (possibly the parameter-count)
        # Let's call f, but we know already that this will clash
        return f(self, *args, **kwargs)
    try:
        dbRes = db.Get(db.Key.from_path(viurCacheName, key))
    except db.EntityNotFoundError:
        dbRes = None
    if dbRes:
        if not maxCacheTime or dbRes["creationtime"] > datetime.now() - timedelta(seconds=maxCacheTime):
            # We store it unlimited or the cache is fresh enough
            logging.debug("This request was served from cache.")
            currentRequest.response.headers['Content-Type'] = dbRes["content-type"].encode("UTF-8")
            return dbRes["data"]
    # If we made it this far, the request wasn't cached or was too old; we need to rebuild it
    res = f(self, *args, **kwargs)
    dbEntity = db.Entity(viurCacheName, name=key)
    dbEntity["data"] = res
    dbEntity["creationtime"] = datetime.now()
    dbEntity["path"] = path
    dbEntity["content-type"] = request.current.get().response.headers['Content-Type']
    dbEntity.set_unindexed_properties(["data", "content-type"])  # We can save 2 DB-writes :)
    db.Put(dbEntity)
    logging.debug("This request was a cache-miss. Cache has been updated.")
    return res
def getOrBuildIndex(self, origQuery):
    """
        Builds a specific index based on origQuery AND local variables (self.pageSize and self.maxPages).
        Returns a list of starting-cursors for each page. You probably shouldn't call this directly;
        use cursorForQuery instead.

        :param origQuery: Query to build the index for
        :type origQuery: db.Query
        :returns: list
    """
    key = self.keyFromQuery(origQuery)
    if key in self._cache:
        # We have it cached
        return self._cache[key]
    # We don't have it cached - try to load it from the datastore
    try:
        index = db.Get(db.Key.from_path(self._dbType, key))
        res = json.loads(index["data"])
        self._cache[key] = res
        return res
    except db.EntityNotFoundError:
        # It's not in the datastore either
        pass
    # We don't have this index yet... Build it
    # Clone the original query
    queryRes = origQuery.clone(keysOnly=True).datastoreQuery.Run(limit=self.maxPages * self.pageSize)
    # Build up the index
    res = list()
    previousCursor = None  # The first page doesn't have any cursor
    # enumerate is slightly faster than a manual loop counter
    for counter, discardedKey in enumerate(queryRes):
        if counter % self.pageSize == 0:
            res.append(previousCursor)
        if counter % self.pageSize == (self.pageSize - 1):
            previousCursor = str(queryRes.cursor().urlsafe())
    if not len(res):
        # Ensure that the first page exists
        res.append(None)
    entry = db.Entity(self._dbType, name=key)
    entry["data"] = json.dumps(res)
    entry["creationdate"] = datetime.now()
    db.Put(entry)
    return res
def save(self, req):
    """
        Writes the session to the datastore.

        Does nothing, if the session hasn't been changed in the current request.
    """
    try:
        if self.changed or self.isInitial:
            if not (req.isSSLConnection or req.isDevServer):
                # We will not issue sessions over http anymore
                return False
            # Get the current user id
            try:
                # Check for our custom user-api
                userid = conf["viur.mainApp"].user.getCurrentUser()["key"]
            except:
                userid = None
            try:
                dbSession = db.Entity(db.Key(self.kindName, self.cookieKey))
                dbSession["data"] = db.fixUnindexableProperties(self.session)
                dbSession["staticSecurityKey"] = self.staticSecurityKey
                dbSession["securityKey"] = self.securityKey
                dbSession["lastseen"] = time()
                # Store the userid inside the sessionobj, so we can kill specific sessions if needed
                dbSession["user"] = str(userid) or "guest"
                dbSession.exclude_from_indexes = ["data"]
                db.Put(dbSession)
            except Exception as e:
                logging.exception(e)
                raise  # FIXME
            sameSite = "; SameSite=%s" % self.sameSite if self.sameSite else ""
            secure = "; Secure" if not req.isDevServer else ""
            maxAge = "; Max-Age=%s" % conf["viur.session.lifeTime"] if not self.sessionCookie else ""
            req.response.headerlist.append(
                ("Set-Cookie", "%s=%s; Path=/; HttpOnly%s%s%s" % (self.cookieName, self.cookieKey, sameSite, secure, maxAge)))
    except Exception as e:
        logging.exception(e)
        raise  # FIXME
def create(duration: Union[None, int] = None, **kwargs) -> str:
    """
        Creates a new onetime securitykey or returns the current session's CSRF-token.
        The custom data (given as keyword arguments) that can be stored with the key if
        `duration` is set must be serializable by the datastore.

        :param duration: Make this key valid for a fixed timeframe (and independent of the current session)
        :returns: The new onetime key
    """
    if not duration:
        return currentSession.get().getSecurityKey()
    key = generateRandomString()
    duration = int(duration)
    dbObj = db.Entity(db.Key(securityKeyKindName, key))
    for k, v in kwargs.items():
        dbObj[k] = v
    dbObj["until"] = utcNow() + timedelta(seconds=duration)
    db.Put(dbObj)
    return key
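# A minimal usage sketch (the "userKey" payload name is hypothetical). Without a duration the
# call simply returns the session's CSRF-token; with a duration the key and the keyword
# arguments are persisted in the datastore until they expire.
csrfToken = create()                                    # bound to the current session
resetKey = create(duration=60 * 60, userKey="abc123")   # standalone key, valid for one hour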
def storeEntry(self, e, key):
    if not self._checkKey(key, export=False):
        raise errors.Forbidden()
    entry = pickle.loads(bytes.fromhex(e))
    for k in list(entry.keys()):
        if isinstance(entry[k], bytes):
            entry[k] = entry[k].decode("UTF-8")
    for k in list(entry.keys()):
        if k in entry and "." in k:
            base = k.split(".")[0]
            tmpDict = {}
            for subkey in list(entry.keys()):
                if subkey.startswith("%s." % base):
                    postfix = subkey.split(".")[1]
                    tmpDict[postfix] = entry[subkey]
                    del entry[subkey]
            entry[base] = tmpDict
    for k in list(entry.keys()):
        if "-" in k:
            entry[k.replace("-", "_")] = entry[k]
            del entry[k]
    if "key" in entry:
        key = db.Key(*entry["key"].split("/"))
    else:
        raise AttributeError()
    dbEntry = db.Entity(key)
    # maybe some more fixes here ?
    for k in entry.keys():
        if k != "key":
            val = entry[k]
            # if isinstance(val, dict) or isinstance(val, list):
            #     val = pickle.dumps(val)
            dbEntry[k] = val
    # dbEntry = fixUnindexed(dbEntry)
    try:
        db.Put(dbEntry)
    except:
        from pprint import pprint
        pprint(dbEntry)
        raise
def reset(self):
    """
        Invalidates the current session and starts a new one.

        This function is especially useful at login, where we might need to create an SSL-capable session.

        :warning: Everything (except the current language) is flushed.
    """
    lang = self.session.get("language")
    if self.cookieKey:
        db.Delete(db.Key(self.kindName, self.cookieKey))
    self.cookieKey = utils.generateRandomString(42)
    self.staticSecurityKey = utils.generateRandomString(13)
    self.securityKey = utils.generateRandomString(13)
    self.changed = True
    self.isInitial = True
    self.session = db.Entity()
    if lang:
        self.session["language"] = lang
def create(duration: Union[None, int] = None, **kwargs):
    """
        Creates a new onetime securitykey for the current session.
        If duration is not set, this key is valid only for the current session.
        Otherwise, the key and its data are serialized and saved inside the datastore
        for up to duration seconds.

        :param duration: Make this key valid for a fixed timeframe (and independent of the current session)
        :type duration: int or None
        :returns: The new onetime key
    """
    if not duration:
        return currentSession.getSecurityKey()
    key = generateRandomString()
    duration = int(duration)
    dbObj = db.Entity(securityKeyKindName, name=key)
    for k, v in kwargs.items():
        dbObj[k] = v
    dbObj["until"] = datetime.now() + timedelta(seconds=duration)
    db.Put(dbObj)
    return key
def markFileForDeletion(dlkey):
    """
        Adds a marker to the datastore that the file specified as *dlkey* can be deleted.

        Once the mark has been set, the datastore is checked four times (default: every 4 hours)
        if the file is in use somewhere. If it is still in use, the mark goes away, otherwise
        the mark and the file are removed from the datastore. These delayed checks are necessary
        due to database inconsistency.

        :type dlkey: str
        :param dlkey: Unique download-key of the file that shall be marked for deletion.
    """
    fileObj = db.Query("viur-deleted-files").filter("dlkey", dlkey).getEntry()
    if fileObj:
        # It's already marked
        return
    fileObj = db.Entity(db.Key("viur-deleted-files"))
    fileObj["itercount"] = 0
    fileObj["dlkey"] = str(dlkey)
    db.Put(fileObj)
def sendEMail(*,
              tpl: str = None,
              stringTemplate: str = None,
              skel: Union[None, Dict, "SkeletonInstance", List["SkeletonInstance"]] = None,
              sender: str = None,
              dests: Union[str, List[str]] = None,
              cc: Union[str, List[str]] = None,
              bcc: Union[str, List[str]] = None,
              headers: Dict[str, str] = None,
              attachments: List[Dict[str, Any]] = None,
              context: Union[db.DATASTORE_BASE_TYPES, List[db.DATASTORE_BASE_TYPES], db.Entity] = None,
              **kwargs) -> Any:
    """
        General purpose function for sending e-mail.
        This function allows for sending e-mails, also with generated content using the Jinja2 template engine.
        You have to implement a method which is called to finally send the prepared email.
        For this you have to allocate *viur.email.transportClass* in conf.

        :param tpl: The name of a template from the deploy/emails directory.
        :param stringTemplate: This string is interpreted as the template contents. Alternative to load from template file.
        :param skel: The data made available to the template. In case of a Skeleton or SkelList, it's parsed the usual way;
            dictionaries are passed unchanged.
        :param sender: The address sending this mail.
        :param dests: A list of addresses to send this mail to. A bare string will be treated as a list with 1 address.
        :param cc: Carbon-copy recipients. A bare string will be treated as a list with 1 address.
        :param bcc: Blind carbon-copy recipients. A bare string will be treated as a list with 1 address.
        :param headers: Specify headers for this email.
        :param attachments: List of files to be sent within the mail as attachments. Each attachment must be a dictionary
            with these keys:
                filename (string): Name of the file that's attached. Always required.
                content (bytes): Content of the attachment as bytes. Required for the Send in Blue API.
                mimetype (string): Mimetype of the file. Suggested parameter for other implementations (not used by SIB).
                gcsfile (string): Link to a GCS-file to include instead of content. Not supported by the current SIB implementation.
        :param context: Arbitrary data that can be stored along the queue entry to be evaluated in
            transportSuccessfulCallback (useful for tracking delivery / opening events etc).

        .. warning::
            As emails will be queued (and not sent directly) you cannot exceed 1MB in total
            (for all text and attachments combined)!
    """
    # First, ensure we're able to send email at all
    transportClass = conf["viur.email.transportClass"]
    assert issubclass(transportClass, EmailTransport), "No or invalid email transportclass specified!"
    # Ensure that all recipient parameters (dest, cc, bcc) are a list
    dests = normalizeToList(dests)
    cc = normalizeToList(cc)
    bcc = normalizeToList(bcc)
    assert dests or cc or bcc, "No destination address given"
    attachments = normalizeToList(attachments)
    if not (bool(stringTemplate) ^ bool(tpl)):
        raise ValueError("You have to set the params 'tpl' xor a 'stringTemplate'.")
    if attachments:
        # Ensure each attachment has the filename key and rewrite each dict to db.Entity so we can exclude
        # it from being indexed
        for _ in range(0, len(attachments)):
            attachment = attachments.pop(0)
            assert "filename" in attachment
            entity = db.Entity()
            for k, v in attachment.items():
                entity[k] = v
                entity.exclude_from_indexes.add(k)
            attachments.append(entity)
        assert all(["filename" in x for x in attachments]), "Attachment is missing the filename key"
    # If conf["viur.email.recipientOverride"] is set we'll redirect any email to these address(es)
    if conf["viur.email.recipientOverride"]:
        logging.warning("Overriding destination %s with %s", dests, conf["viur.email.recipientOverride"])
        oldDests = dests
        newDests = normalizeToList(conf["viur.email.recipientOverride"])
        dests = []
        for newDest in newDests:
            if newDest.startswith("@"):
                for oldDest in oldDests:
                    dests.append(oldDest.replace(".", "_dot_").replace("@", "_at_") + newDest)
            else:
                dests.append(newDest)
        cc = bcc = []
    elif conf["viur.email.recipientOverride"] is False:
        logging.warning("Sending emails disabled by config[viur.email.recipientOverride]")
        return False
    if conf["viur.email.senderOverride"]:
        sender = conf["viur.email.senderOverride"]
    elif sender is None:
        sender = f"viur@{projectID}.appspotmail.com"
    subject, body = conf["viur.emailRenderer"](dests, tpl, stringTemplate, skel, **kwargs)
    # Push that email to the outgoing queue
    queueEntity = db.Entity(db.Key("viur-emails"))
    queueEntity["isSend"] = False
    queueEntity["errorCount"] = 0
    queueEntity["creationDate"] = utils.utcNow()
    queueEntity["sender"] = sender
    queueEntity["dests"] = dests
    queueEntity["cc"] = cc
    queueEntity["bcc"] = bcc
    queueEntity["subject"] = subject
    queueEntity["body"] = body
    queueEntity["headers"] = headers
    queueEntity["attachments"] = attachments
    queueEntity["context"] = context
    queueEntity.exclude_from_indexes = ["body", "attachments", "context"]
    transportClass.validateQueueEntity(queueEntity)  # Will raise an exception if the entity is not valid
    if utils.isLocalDevelopmentServer and not conf["viur.email.sendFromLocalDevelopmentServer"]:
        logging.info("Not sending email from local development server")
        logging.info("Subject: %s", queueEntity["subject"])
        logging.info("Body: %s", queueEntity["body"])
        logging.info("Recipients: %s", queueEntity["dests"])
        return False
    db.Put(queueEntity)
    sendEmailDeferred(queueEntity.key, _queue="viur-emails")
    return True
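# A minimal usage sketch (template name, address and attachment content are hypothetical).
# sendEMail() only queues the message; the configured viur.email.transportClass delivers it later.
sendEMail(
    tpl="password_reset",                      # assumption: deploy/emails/password_reset exists
    skel={"firstname": "Ada"},                 # plain dicts are passed to the template unchanged
    dests="ada@example.com",                   # a bare string is treated as a one-element list
    attachments=[{"filename": "invoice.pdf", "content": b"%PDF-...", "mimetype": "application/pdf"}],
)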
def checkout(self, step=None, key=None, skey=None, *args, **kwargs):
    """
        Performs the checkout process through the state machine provided by self.steps.

        :param step: The current step index, None for beginning a new checkout
        :param key: Key of the current checkout
        :param skey: Server security key
        :return: Returns the rendered template or throws redirection exceptions.
    """
    myKindName = self.viewSkel().kindName
    if step is None:
        logging.info("Starting new checkout process")
        billObj = db.Entity(myKindName)
        billObj["idx"] = "0000000"
        for state in self.states:
            billObj["state_%s" % state] = "0"
        db.Put(billObj)
        key = str(billObj.key())
        # Copy the cart
        if "amountSkel" in dir(self):
            cart = session.current.get("cart_products") or {}
            s = self.amountSkel()
            products = []
            for prod, atts in cart.items():
                for i in range(0, atts["amount"]):
                    products.append(str(prod))
            s.fromClient({"product": products})
            s.toDB()
        session.current["order_" + myKindName] = {"key": str(key), "completedSteps": []}
        session.current.markChanged()
        raise errors.Redirect("?step=0&key=%s" % str(key))
    elif key:
        try:
            orderKey = db.Key(key)
            step = int(step)
            assert step >= 0
            assert step < len(self.steps)
        except:
            raise errors.NotAcceptable()
        sessionInfo = session.current.get("order_" + myKindName)
        if not sessionInfo or not sessionInfo.get("key") == str(orderKey):
            raise errors.Unauthorized()
        if step in sessionInfo["completedSteps"]:
            session.current["order_" + myKindName]["completedSteps"] = [x for x in sessionInfo["completedSteps"] if x < step]
            session.current.markChanged()
        # Make sure that no steps can be skipped
        if step != 0 and not step - 1 in sessionInfo["completedSteps"]:
            raise errors.Redirect("?step=0&key=%s" % str(orderKey))
        currentStep = self.steps[step]
        if "preHandler" in currentStep.keys():
            try:
                if isinstance(currentStep["preHandler"], list):
                    for handler in currentStep["preHandler"]:
                        handler(self, step, str(orderKey), *args, **kwargs)
                else:
                    currentStep["preHandler"](self, step, str(orderKey), refkwargs=kwargs, *args, **kwargs)
            except SkipStepException:
                session.current["order_" + myKindName]["completedSteps"].append(step)
                session.current.markChanged()
                raise errors.Redirect("?step=%s&key=%s" % (str(step + 1), str(orderKey)))
            except ReturnHtmlException as e:
                return e.html
        if "requiresSecurityKey" in currentStep and currentStep["requiresSecurityKey"]:
            if not securitykey.validate(skey):
                raise errors.PreconditionFailed()
        if "mainHandler" in currentStep:
            if currentStep["mainHandler"]["action"] == "edit":
                skel = self.getSkelByName(currentStep["mainHandler"]["skeleton"], str(orderKey))
                skel.fromDB(str(orderKey))
                if not len(kwargs.keys()) or not skel.fromClient(kwargs):
                    return self.render.edit(skel, tpl=currentStep["mainHandler"]["template"], step=step)
                skel.toDB()
            if currentStep["mainHandler"]["action"] == "view":
                if not "complete" in kwargs or not kwargs["complete"] == u"1":
                    skel = self.getSkelByName(currentStep["mainHandler"]["skeleton"], str(orderKey))
                    skel.fromDB(str(orderKey))
                    return self.render.view(skel, tpl=currentStep["mainHandler"]["template"], step=step)
            elif currentStep["mainHandler"]["action"] == "function":
                res = currentStep["mainHandler"]["function"](self, step, str(orderKey), *args, **kwargs)
                if res:
                    return res
        if "postHandler" in currentStep:
            currentStep["postHandler"](self, step, str(orderKey), *args, **kwargs)
        session.current["order_" + myKindName]["completedSteps"].append(step)
        session.current.markChanged()
        logging.info("next ?step=%s&key=%s" % (str(step + 1), str(orderKey)))
        raise errors.Redirect("?step=%s&key=%s" % (str(step + 1), str(orderKey)))
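# A minimal sketch of a steps definition this state machine can drive (skeleton, template and
# function names are hypothetical). Each step may define "preHandler", "mainHandler" (with an
# action of "edit", "view" or "function"), "postHandler" and "requiresSecurityKey".
steps = [
    {"mainHandler": {"action": "edit", "skeleton": "shippingaddress", "template": "order_shippingaddress"}},
    {"mainHandler": {"action": "view", "skeleton": "order", "template": "order_confirm"}, "requiresSecurityKey": True},
    {"mainHandler": {"action": "function", "function": startPaymentProcess}},  # startPaymentProcess is hypothetical
]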
def cron(self, cronName="default", *args, **kwargs):
    global _callableTasks, _periodicTasks, _appengineServiceIPs
    req = currentRequest.get()
    if not req.isDevServer:
        if 'X-Appengine-Cron' not in req.request.headers:
            logging.critical('Detected an attempted XSRF attack. The header "X-AppEngine-Cron" was not set.')
            raise errors.Forbidden()
        if req.request.environ.get("HTTP_X_APPENGINE_USER_IP") not in _appengineServiceIPs:
            logging.critical('Detected an attempted XSRF attack. This request did not originate from Cron.')
            raise errors.Forbidden()
    if cronName not in _periodicTasks:
        logging.warning("Got Cron request '%s' which doesn't have any tasks" % cronName)
    for task, interval in _periodicTasks.get(cronName, {}).items():  # Call all periodic tasks bound to that queue
        periodicTaskName = task.periodicTaskName.lower()
        if interval:  # Ensure this task doesn't get called too often
            lastCall = db.Get(db.Key("viur-task-interval", periodicTaskName))
            if lastCall and utils.utcNow() - lastCall["date"] < timedelta(minutes=interval):
                logging.debug("Skipping task %s - Has already run recently." % periodicTaskName)
                continue
        res = self.findBoundTask(task)
        try:
            if res:  # It's bound, call it this way :)
                res[0]()
            else:
                task()  # It seems it wasn't bound - call it as a static method
        except Exception as e:
            logging.error("Error calling periodic task %s", periodicTaskName)
            logging.exception(e)
        else:
            logging.debug("Successfully called task %s", periodicTaskName)
            if interval:
                # Update its last-call timestamp
                entry = db.Entity(db.Key("viur-task-interval", periodicTaskName))
                entry["date"] = utils.utcNow()
                db.Put(entry)
    logging.debug("Periodic tasks complete")
    for currentTask in db.Query("viur-queued-tasks").iter():  # Look for queued tasks
        db.Delete(currentTask.key())
        if currentTask["taskid"] in _callableTasks:
            task = _callableTasks[currentTask["taskid"]]()
            tmpDict = {}
            for k in currentTask.keys():
                if k == "taskid":
                    continue
                tmpDict[k] = json.loads(currentTask[k])
            try:
                task.execute(**tmpDict)
            except Exception as e:
                logging.error("Error executing Task")
                logging.exception(e)
    logging.debug("Scheduled tasks complete")
def postSavedHandler(self, skel, boneName, key):
    if not skel[boneName]:
        values = []
    elif self.multiple and self.languages:
        values = chain(*skel[boneName].values())
    elif self.languages:
        values = list(skel[boneName].values())
    elif self.multiple:
        values = skel[boneName]
    else:
        values = [skel[boneName]]
    values = [x for x in values if x is not None]
    # elif isinstance(skel[boneName], dict):
    #     values = [dict((k, v) for k, v in skel[boneName].items())]
    # else:
    #     values = [dict((k, v) for k, v in x.items()) for x in skel[boneName]]
    parentValues = db.Entity()
    srcEntity = skel.dbEntity
    parentValues.key = srcEntity.key
    for boneKey in (self.parentKeys or []):
        parentValues[boneKey] = srcEntity.get(boneKey)
    dbVals = db.Query("viur-relations")
    dbVals.filter("viur_src_kind =", skel.kindName)
    dbVals.filter("viur_dest_kind =", self.kind)
    dbVals.filter("viur_src_property =", boneName)
    dbVals.filter("src.__key__ =", key)
    for dbObj in dbVals.iter():
        try:
            if not dbObj["dest"].key in [x["dest"]["key"] for x in values]:  # Relation has been removed
                db.Delete(dbObj.key)
                continue
        except:  # This entry is corrupt
            db.Delete(dbObj.key)
        else:  # Relation: Updated
            data = [x for x in values if x["dest"]["key"] == dbObj["dest"].key][0]
            # Write our (updated) values in
            refSkel = data["dest"]
            dbObj["dest"] = refSkel.serialize(parentIndexed=True)
            dbObj["src"] = parentValues
            if self.using is not None:
                usingSkel = data["rel"]
                dbObj["rel"] = usingSkel.serialize(parentIndexed=True)
            dbObj["viur_delayed_update_tag"] = time()
            dbObj["viur_relational_updateLevel"] = self.updateLevel
            dbObj["viur_relational_consistency"] = self.consistency.value
            dbObj["viur_foreign_keys"] = self.refKeys
            dbObj["viurTags"] = srcEntity.get("viurTags")  # Copy tags over so we can still use our searchengine
            db.Put(dbObj)
            values.remove(data)
    # Add any new Relation
    for val in values:
        dbObj = db.Entity(db.Key("viur-relations", parent=key))
        refSkel = val["dest"]
        dbObj["dest"] = refSkel.serialize(parentIndexed=True)
        dbObj["src"] = parentValues
        if self.using is not None:
            usingSkel = val["rel"]
            dbObj["rel"] = usingSkel.serialize(parentIndexed=True)
        dbObj["viur_delayed_update_tag"] = time()
        dbObj["viur_src_kind"] = skel.kindName  # The kind of the entry referencing
        dbObj["viur_src_property"] = boneName  # The key of the bone referencing
        dbObj["viur_dest_kind"] = self.kind
        dbObj["viur_relational_updateLevel"] = self.updateLevel
        dbObj["viur_relational_consistency"] = self.consistency.value
        dbObj["viur_foreign_keys"] = self.refKeys
        db.Put(dbObj)
def iterImport(module, target, exportKey, cursor=None, amount=0):
    """
        Processes 100 entries and calls the next batch.
    """
    urlfetch.set_default_fetch_deadline(20)
    payload = {"module": module, "key": exportKey}
    if cursor:
        payload.update({"cursor": cursor})
    result = urlfetch.fetch(url=target,
                            payload=urllib.urlencode(payload),
                            method=urlfetch.POST,
                            headers={'Content-Type': 'application/x-www-form-urlencoded'})
    if result.status_code == 200:
        res = pickle.loads(result.content.decode("HEX"))
        skel = skeletonByKind(module)()
        logging.info("%s: %d new entries fetched, total %d entries fetched" % (module, len(res["values"]), amount))
        if len(res["values"]) == 0:
            try:
                utils.sendEMailToAdmins("Import of kind %s finished with %d entities" % (module, amount),
                                        "ViUR finished importing %d entities of kind %s from %s.\n" % (amount, module, target))
            except:  # OverQuota, whatever
                logging.error("Unable to send Email")
            return
        for entry in res["values"]:
            for k in list(entry.keys()):
                if isinstance(entry[k], str):
                    entry[k] = entry[k].decode("UTF-8")
            if "key" not in entry:
                entry["key"] = entry["id"]
            key = db.Key(encoded=utils.normalizeKey(entry["key"]))
            # Special case: Convert old module root nodes!!!
            if module.endswith("_rootNode") and key.name() and "_modul_" in key.name():
                name = key.name().replace("_modul_", "_module_")
            else:
                name = key.name()
            dbEntry = db.Entity(kind=key.kind(), parent=key.parent(), id=key.id(), name=name)
            for k in entry.keys():
                if k == "key":
                    continue
                dbEntry[k] = entry[k]
                # Special case: Convert old module root nodes!!!
                if (isinstance(skel, (HierarchySkel, TreeLeafSkel))
                        and k in ["parentdir", "parententry", "parentrepo"]
                        and entry[k]):
                    key = db.Key(encoded=str(entry[k]))
                    if key.parent():
                        parent = db.Key(encoded=utils.normalizeKey(key.parent()))
                    else:
                        parent = None
                    if key.id_or_name() and "_modul_" in str(key.id_or_name()):
                        name = key.id_or_name().replace("_modul_", "_module_")
                    else:
                        name = key.id_or_name()
                    dbEntry[k] = str(db.Key.from_path(key.kind(), name, parent=parent))
            db.Put(dbEntry)
            skel.fromDB(str(dbEntry.key()))
            skel.refresh()
            skel.toDB(clearUpdateTag=True)
            amount += 1
        iterImport(module, target, exportKey, res["cursor"], amount)