def triggerSendNewsletter(self, key, skey, *args, **kwargs):
	"""
		Mark a newsletter as triggered and kick off recipient fetching.

		Requires a valid security key and a logged-in root user, and refuses
		to run for a newsletter that was already triggered or sent.
	"""
	if not securitykey.validate(skey):
		raise errors.PreconditionFailed()
	currentUser = utils.getCurrentUser()
	if not currentUser or "root" not in currentUser["access"]:
		raise errors.Unauthorized()
	skel = self.viewSkel()
	if not skel.fromDB(key):
		raise errors.NotFound()
	if skel["triggered"] or skel["sent"]:
		raise errors.Forbidden("This newsletter was already sent.")
	# Flip the triggered flag transactionally; the check-dict guards
	# against a concurrent trigger slipping through.
	try:
		setStatus(
			skel["key"],
			values={
				"triggered": True,
				"triggereddate": datetime.datetime.now(),
			},
			check={
				"triggered": False,
				"sent": False
			})
	except Exception as exc:
		logging.exception(exc)
		raise errors.Forbidden()
	self.fetchNewsletterRecipients(str(skel["key"]))
	return json.dumps("OKAY")
def storeEntry(self, e, key):
	"""
		Restore one pickled datastore entity shipped from another instance.

		:param e: Pickled dict of entity properties, carrying either a
			"key" or an "id" entry with the encoded datastore key.
		:param key: Import access key, validated via _checkKey.
		:raises errors.Forbidden: If the access key is invalid.
		:raises AttributeError: If the entry has neither "key" nor "id".
	"""
	if not self._checkKey(key, export=False):
		raise errors.Forbidden()
	# SECURITY: pickle.loads executes arbitrary code if the payload is
	# attacker-controlled; this endpoint must stay guarded by _checkKey.
	entry = pickle.loads(e)
	# Decode raw byte strings to unicode before writing them back.
	for k in list(entry.keys()):
		if isinstance(entry[k], str):
			entry[k] = entry[k].decode("UTF-8")
	if "key" in entry:
		key = db.Key(encoded=entry["key"])
	elif "id" in entry:
		key = db.Key(encoded=entry["id"])
	else:
		raise AttributeError()
	# Diagnostics only - log at debug level, not error.
	logging.debug(key.kind())
	logging.debug(key.id())
	logging.debug(key.name())
	dbEntry = db.Entity(kind=key.kind(),
						parent=key.parent(),
						id=key.id(),
						_app=key.app(),
						name=key.name())
	for k in entry.keys():
		if k != "key":
			dbEntry[k] = entry[k]
	if dbEntry.key().id():
		# Ensure the Datastore knows that its id is in use
		datastore._GetConnection()._reserve_keys([dbEntry.key()])
	db.Put(dbEntry)
def getEntry(self, module, id, key=None):
	"""
		Fetch a single entity by its datastore key and return it pickled.
	"""
	if not self._checkKey(key, export=True):
		raise errors.Forbidden()
	payload = self.genDict(db.Get(id))
	return pickle.dumps(payload)
def index(self, *args, **kwargs):
	"""
		Render the contact form and, on a valid submission, send the mail.

		A bare GET (no kwargs) shows the empty form; a POST is validated
		against the skeleton and the security key before sending.
	"""
	if not self.canUse():
		raise errors.Forbidden()  # Unauthorized
	skel = self.mailSkel()
	if not kwargs:
		# Initial request: just render the empty form.
		return self.render.add(skel=skel, failed=False)
	if not skel.fromClient(kwargs) or "skey" not in kwargs:
		return self.render.add(skel=skel, failed=True)
	if not securitykey.validate(kwargs["skey"]):
		raise errors.PreconditionFailed()
	# Allow bones to perform outstanding "magic" operations before sending the mail
	for key, _bone in skel.items():
		if isinstance(_bone, baseBone):
			_bone.performMagic(skel.valuesCache, key, isAdd=True)
	# Get recipients
	rcpts = self.getRcpts(skel)
	# Get additional options for sendEMail; anything that is not a dict
	# is discarded so the **opts expansion below cannot blow up.
	opts = self.getOptions(skel)
	if not isinstance(opts, dict):
		opts = {}
	# Send the email!
	utils.sendEMail(rcpts, self.mailTemplate, skel, **opts)
	self.onItemAdded(skel)
	return self.render.addItemSuccess(skel)
def secondFactorSucceeded(self, secondFactor, userKey):
	"""
		Complete the login once a second-factor provider reports success.

		The reported user must match the pending user stored in the session
		during the first authentication step.
	"""
	logging.debug("Got SecondFactorSucceeded call from %s." % secondFactor)
	pendingKey = session.current["_mayBeUserKey"]
	if str(pendingKey) != str(userKey):
		raise errors.Forbidden()
	return self.authenticateUser(userKey)
def getUploadURL(self, *args, **kwargs):
	"""
		Hand out a fresh blobstore upload URL to an authorized client.
	"""
	if not self.canAdd("leaf", None):
		raise errors.Forbidden()
	skey = kwargs.get("skey", "")
	if not securitykey.validate(skey):
		raise errors.PreconditionFailed()
	return blobstore.create_upload_url("%s/upload" % self.modulePath)
def login(self, skey="", *args, **kwargs):
	"""
		Log a user in via the Google Accounts (GAE users) API.

		If no Google user is signed in, redirects to the Google login page.
		Otherwise the matching user skeleton is looked up by uid, then by
		email; a new one is created if registration is enabled (or the
		Google user is an app admin). Finally the flow is handed back to
		the user module.

		:raises errors.Forbidden: Registration disabled and user unknown.
		:raises errors.Redirect: To the Google login page when signed out.
	"""
	if users.get_current_user():
		addSkel = skeletonByKind(self.userModule.addSkel().kindName)  # Ensure that we have the full skeleton
		currentUser = users.get_current_user()
		uid = currentUser.user_id()
		userSkel = addSkel().all().filter("uid =", uid).getSkel()
		if not userSkel:
			# We'll try again - checking if there's already an user with that email
			userSkel = addSkel().all().filter("name.idx =", currentUser.email().lower()).getSkel()
			if not userSkel:  # Still no luck - it's a completely new user
				if not self.registrationEnabled and not users.is_current_user_admin():
					# Registration is disabled, it's a new user and that user is not admin
					logging.warning("Denying registration of %s", currentUser.email())
					raise errors.Forbidden("Registration for new users is disabled")
				userSkel = addSkel()  # We'll add a new user
			# Bind the Google uid (and current email) to this skeleton -
			# also for users matched by email, so the uid lookup hits next time.
			userSkel["uid"] = uid
			userSkel["name"] = currentUser.email()
			isAdd = True
		else:
			isAdd = False
		now = datetime.datetime.now()
		if isAdd or (now-userSkel["lastlogin"]) > datetime.timedelta(minutes=30):
			# Conserve DB-Writes: Update the user max once in 30 Minutes
			userSkel["lastlogin"] = now
			if users.is_current_user_admin():
				# Grant app admins the "root" access right on login.
				if not userSkel["access"]:
					userSkel["access"] = []
				if not "root" in userSkel["access"]:
					userSkel["access"].append("root")
				userSkel["gaeadmin"] = True
			else:
				userSkel["gaeadmin"] = False
			# NOTE(review): toDB() is placed inside the 30-minute guard to
			# match the "conserve DB-writes" intent - confirm against the
			# original (unflattened) source.
			assert userSkel.toDB()
		return self.userModule.continueAuthenticationFlow(self, userSkel["key"])
	raise errors.Redirect(users.create_login_url(self.modulePath+"/login"))
def storeEntry2(self, e, key):
	"""
		Restore one hex+pickle encoded entity and rebuild its skeleton.

		After writing the raw entity, the matching skeleton is loaded,
		refreshed and saved again so derived data is regenerated.

		:param e: Hex-encoded, pickled dict of entity properties.
		:param key: Import access key, validated via _checkKey.
		:raises errors.Forbidden: If the access key is invalid.
	"""
	if not self._checkKey(key, export=False):
		raise errors.Forbidden()
	# SECURITY: pickle.loads executes arbitrary code if the payload is
	# attacker-controlled; this endpoint must stay guarded by _checkKey.
	entry = pickle.loads(e.decode("HEX"))
	# Decode raw byte strings to unicode before writing them back.
	for k in list(entry.keys()):
		if isinstance(entry[k], str):
			entry[k] = entry[k].decode("UTF-8")
	key = db.Key(encoded=utils.normalizeKey(entry["key"]))
	logging.info(key.kind())
	logging.info(key.id())
	logging.info(key.name())
	dbEntry = db.Entity(kind=key.kind(),
						parent=key.parent(),
						id=key.id(),
						name=key.name())
	for k in entry.keys():
		if k != "key":
			dbEntry[k] = entry[k]
	db.Put(dbEntry)
	try:
		skel = skeletonByKind(key.kind())()
	except Exception:
		# BUG FIX: the original logged "skipping" but fell through and used
		# the undefined *skel*, raising a NameError. Bail out instead.
		logging.error("Unknown Skeleton - skipping")
		return
	skel.fromDB(str(dbEntry.key()))
	skel.refresh()
	skel.toDB(clearUpdateTag=True)
def getCfg(self, module, key):
	"""
		Return the rendered skeleton structure of *module*, pickled.
	"""
	if not self._checkKey(key, export=False):
		raise errors.Forbidden()
	skelCls = skeletonByKind(module)
	assert skelCls is not None
	renderer = DefaultRender()
	structure = renderer.renderSkelStructure(skelCls())
	return pickle.dumps(structure)
def otp(self, otptoken=None, skey=None, *args, **kwargs):
	"""
		Verify a one-time password as the second authentication factor.

		Renders the OTP entry form when called without a token; otherwise
		validates the token against the generated candidates, tracking
		failures and the user's clock drift.

		:raises errors.Forbidden: No pending OTP login, or too many retries.
		:raises errors.PreconditionFailed: Bad skey or malformed otp secret.
	"""
	token = session.current.get("_otp_user")
	if not token:
		raise errors.Forbidden()
	if otptoken is None:
		# BUG FIX: the original dropped the rendered form and fell through
		# into validation; return the edit form instead.
		return self.userModule.render.edit(self.otpSkel())
	if not securitykey.validate(skey):
		raise errors.PreconditionFailed()
	if token["failures"] > 3:
		raise errors.Forbidden(
			"Maximum amount of authentication retries exceeded")
	if len(token["otpkey"]) % 2 == 1:
		raise errors.PreconditionFailed(
			"The otp secret stored for this user is invalid (uneven length)"
		)
	validTokens = self.generateOtps(token["otpkey"], token["otptimedrift"])
	try:
		otptoken = int(otptoken)
	except (ValueError, TypeError):
		# We got a non-numeric token - this cant be correct.
		# BUG FIX: return the rendered form (the original discarded it and
		# kept going with the unconverted token).
		return self.userModule.render.edit(self.otpSkel(),
										   tpl=self.otpTemplate)
	if otptoken in validTokens:
		userKey = session.current["_otp_user"]["uid"]
		del session.current["_otp_user"]
		session.current.markChanged()
		idx = validTokens.index(int(otptoken))
		if abs(idx - self.windowSize) > 2:
			# The time-drift accumulates to more than 2 minutes, update our
			# clock-drift value accordingly
			self.updateTimeDrift(userKey, idx - self.windowSize)
		return self.userModule.secondFactorSucceeded(self, userKey)
	else:
		token["failures"] += 1
		session.current["_otp_user"] = token
		session.current.markChanged()
		return self.userModule.render.edit(self.otpSkel(),
										   loginFailed=True,
										   tpl=self.otpTemplate)
def secondFactorSucceeded(self, secondFactor, userKey):
	"""
		Complete the login after a successful second-factor verification.

		Rejects the call if the user differs from the session's pending
		user, or if verification exceeded the allowed time window.
	"""
	logging.debug("Got SecondFactorSucceeded call from %s." % secondFactor)
	if str(session.current["_mayBeUserKey"]) != str(userKey):
		raise errors.Forbidden()
	# Assert that the second factor verification finished in time
	elapsed = datetime.datetime.now() - session.current["_secondFactorStart"]
	if elapsed > self.secondFactorTimeWindow:
		raise errors.RequestTimeout()
	return self.authenticateUser(userKey)
def exportBlob(self, cursor=None, key=None):
	"""
		Export one page (up to 16) of blob keys, hex+pickle encoded,
		together with the cursor for the next page.
	"""
	if not self._checkKey(key, export=True):
		raise errors.Forbidden()
	query = BlobInfo.all()
	if cursor is not None:
		query.with_cursor(cursor)
	blobKeys = [str(info.key()) for info in query.run(limit=16)]
	page = {
		"cursor": str(query.cursor()),
		"values": blobKeys
	}
	return pickle.dumps(page).encode("HEX")
def hasblob(self, blobkey, key):
	"""
		Check whether a blob from the old instance has been imported,
		via the viur-blobimportmap lookup table. Returns a JSON bool.
	"""
	if not self._checkKey(key, export=False):
		raise errors.Forbidden()
	try:
		oldKeyHash = sha256(blobkey).hexdigest().encode("hex")
		mapEntry = db.Get(db.Key.from_path("viur-blobimportmap", oldKeyHash))
		if mapEntry:
			if "available" in mapEntry.keys():
				return json.dumps(mapEntry["available"])
			return json.dumps(True)
	except:
		# Best-effort lookup: any failure counts as "not available".
		pass
	return json.dumps(False)
def exportDb(self, cursor=None, key=None, *args, **kwargs):
	"""
		Export one page (5 entities) of the raw datastore, hex+pickle
		encoded, together with the cursor for the next page.
	"""
	if not self._checkKey(key, export=True):
		raise errors.Forbidden()
	startCursor = datastore_query.Cursor(urlsafe=cursor) if cursor else None
	query = datastore.Query(None, cursor=startCursor)
	values = [self.genDict(entity) for entity in query.Run(limit=5)]
	page = {
		"cursor": str(query.GetCursor().urlsafe()),
		"values": values
	}
	return pickle.dumps(page).encode("HEX")
def iterValues2(self, module, cursor=None, key=None):
	"""
		Page through *module*'s entities (32 per page), hex+pickle encoded,
		together with the cursor for the next page.
	"""
	if not self._checkKey(key, export=True):
		raise errors.Forbidden()
	query = db.Query(module)
	if cursor:
		query.cursor(cursor)
	values = [self.genDict(entity) for entity in query.run(limit=32)]
	page = {
		"cursor": str(query.getCursor().urlsafe()),
		"values": values
	}
	return pickle.dumps(page).encode("HEX")
def upload( self, node=None, *args, **kwargs ):
	"""
		Handle blobstore uploads: store each uploaded file as a file-leaf
		skeleton (optionally below *node*), capturing size, mimetype,
		serving URL and - for images - width/height.

		:param node: Optional rootNode key the files are uploaded into;
			without it the files are stored as "weak" (unparented) entries.
		:raises errors.Forbidden: Current user may not add leaf entries.
		:raises errors.NotFound: *node* given but not found.
	"""
	# Determine whether the current user may add leaves below *node*;
	# any error during the check is treated as "not allowed".
	try:
		canAdd = self.canAdd("leaf", node)
	except:
		canAdd = False
	if not canAdd:
		# Discard the already-uploaded blobs before rejecting the request.
		for upload in self.getUploads():
			upload.delete()
		raise errors.Forbidden()
	# NOTE(review): this try has no matching except/finally in this source -
	# the handler (presumably deleting the uploads on failure) appears to be
	# missing/truncated here; confirm against the original file.
	try:
		res = []
		if node:
			# The file is uploaded into a rootNode
			nodeSkel = self.editNodeSkel()
			if not nodeSkel.fromDB(node):
				for upload in self.getUploads():
					upload.delete()
				raise errors.NotFound()
			else:
				weak = False
				parentDir = str(node)
				parentRepo = nodeSkel["parentrepo"]
		else:
			weak = True
			parentDir = None
			parentRepo = None
		# Handle the actual uploads
		for upload in self.getUploads():
			fileName = self.decodeFileName(upload.filename)
			if str(upload.content_type).startswith("image/"):
				try:
					servingURL = images.get_serving_url(upload.key())
					if request.current.get().isDevServer:
						# NOTE: changed for Ticket ADMIN-37
						servingURL = urlparse(servingURL).path
					elif servingURL.startswith("http://"):
						# Rewrite Serving-URLs to https if we are live
						servingURL = servingURL.replace("http://", "https://")
				except:
					servingURL = ""
			else:
				servingURL = ""
			fileSkel = self.addLeafSkel()
			try:
				# only fetching the file header or all if the file is smaller than 1M
				data = blobstore.fetch_data(upload.key(), 0, min(upload.size, 1000000))
				image = images.Image(image_data=data)
				height = image.height
				width = image.width
			except Exception, err:
				# Dimension probing is best-effort; fall back to 0x0.
				height = width = 0
				logging.error(
					"some error occurred while trying to fetch the image header with dimensions")
				logging.exception(err)
			fileSkel.setValues(
				{
					"name": utils.escapeString(fileName),
					"size": upload.size,
					"mimetype": utils.escapeString(upload.content_type),
					"dlkey": str(upload.key()),
					"servingurl": servingURL,
					"parentdir": parentDir,
					"parentrepo": parentRepo,
					"weak": weak,
					"width": width,
					"height": height
				}
			)
			fileSkel.toDB()
			res.append(fileSkel)
			self.onItemUploaded(fileSkel)
		# Uploads stored successfully, generate response to the client
		for r in res:
			logging.info("Upload successful: %s (%s)" % (r["name"], r["dlkey"]))
		user = utils.getCurrentUser()
		if user:
			logging.info("User: %s (%s)" % (user["name"], user["key"]))
		return( self.render.addItemSuccess( res ) )
def getUploadURL(self, key, *args, **kwargs):
	"""
		Hand out a blobstore upload URL for the dbtransfer import.
	"""
	if not self._checkKey(key, export=False):
		raise errors.Forbidden()
	return blobstore.create_upload_url("/dbtransfer/upload")
def getAppId(self, key, *args, **kwargs):
	"""
		Return this instance's application id, pickled.

		Derived from an existing entity's key instead of
		app_identity.get_application_id().
	"""
	if not self._checkKey(key, export=False):
		raise errors.Forbidden()
	anyEntity = db.Query("SharedConfData").get()
	return pickle.dumps(anyEntity.key().app())
def deferred(self, *args, **kwargs):
	"""
		This catches one defered call and routes it to its destination.

		Validates that the request really originates from the GAE task
		queue (header + source address), optionally warns admins about
		excessive retries, restores the frozen environment (user, language,
		custom data) and finally dispatches the call either by module path
		("rel") or via the _deferedTasks registry ("unb").

		:raises errors.Forbidden: Request did not come from the task queue.
		:raises errors.RequestTimeout: Call failed; the Task-API retries.
	"""
	from server import session
	from server import utils
	global _deferedTasks
	req = request.current.get().request
	if 'X-AppEngine-TaskName' not in req.headers:
		logging.critical(
			'Detected an attempted XSRF attack. The header "X-AppEngine-Taskname" was not set.'
		)
		raise errors.Forbidden()
	in_prod = (not req.environ.get("SERVER_SOFTWARE").startswith("Devel"))
	if in_prod and req.environ.get("REMOTE_ADDR") != "0.1.0.2":
		logging.critical(
			'Detected an attempted XSRF attack. This request did not originate from Task Queue.'
		)
		raise errors.Forbidden()
	# Check if the retry count exceeds our warning threshold
	retryCount = req.headers.get("X-Appengine-Taskretrycount", None)
	if retryCount:
		if int(retryCount) == self.retryCountWarningThreshold:
			utils.sendEMailToAdmins(
				"Deferred task retry count exceeded warning threshold",
				"Task %s will now be retried for the %sth time." %
				(req.headers.get("X-Appengine-Taskname", ""), retryCount))
	cmd, data = json.loads(req.body)
	try:
		funcPath, args, kwargs, env = data
	except ValueError:
		# We got an old call without an frozen environment
		env = None
		funcPath, args, kwargs = data
	if env:
		if "user" in env and env["user"]:
			session.current["user"] = env["user"]
		if "lang" in env and env["lang"]:
			request.current.get().language = env["lang"]
		if "custom" in env and conf["viur.tasks.customEnvironmentHandler"]:
			# Check if we need to restore additional enviromental data
			assert isinstance(conf["viur.tasks.customEnvironmentHandler"], tuple) \
				   and len(conf["viur.tasks.customEnvironmentHandler"]) == 2 \
				   and callable(conf["viur.tasks.customEnvironmentHandler"][1]), \
				"Your customEnvironmentHandler must be a tuple of two callable if set!"
			conf["viur.tasks.customEnvironmentHandler"][1](env["custom"])
	if cmd == "rel":
		caller = conf["viur.mainApp"]
		pathlist = [x for x in funcPath.split("/") if x]
		for currpath in pathlist:
			if currpath not in dir(caller):
				logging.error(
					"ViUR missed a deferred task! Could not resolve the path %s. Failed segment was %s",
					funcPath, currpath)
				return
			caller = getattr(caller, currpath)
		try:
			caller(*args, **kwargs)
		except PermanentTaskFailure:
			pass
		except Exception as e:
			logging.exception(e)
			raise errors.RequestTimeout()  # Task-API should retry
	elif cmd == "unb":
		if not funcPath in _deferedTasks:
			logging.error("Ive missed a defered task! %s(%s,%s)" %
						  (funcPath, str(args), str(kwargs)))
			# BUG FIX: bail out like the "rel" branch does - the lookup
			# below would raise KeyError, which the generic handler turns
			# into RequestTimeout and the task queue retries forever.
			return
		try:
			_deferedTasks[funcPath](*args, **kwargs)
		except PermanentTaskFailure:
			pass
		except Exception as e:
			logging.exception(e)
			raise errors.RequestTimeout()  # Task-API should retry
def listModules(self, key):
	"""
		Return the list of known skeleton kinds, pickled.
	"""
	if not self._checkKey(key, export=False):
		raise errors.Forbidden()
	return pickle.dumps(listKnownSkeletons())