Example #1
 def createUploadURL(
         self, node: Union[str, None]) -> Tuple[str, str, Dict[str, str]]:
     global bucket
     targetKey = utils.generateRandomString()
     conditions = [["starts-with", "$key", "%s/source/" % targetKey]]
     if isinstance(credentials, ServiceAccountCredentials):
         # We run locally with a service-account.json
         policy = bucket.generate_upload_policy(conditions)
     else:
         # Use our fixed PolicyGenerator - Google is currently unable to create one itself on GCE
         policy = self.generateUploadPolicy(conditions)
     uploadUrl = "https://%s.storage.googleapis.com" % bucket.name
     # Create a corresponding file-lock object early; otherwise we would have to ensure that the
     # file-lock object the user creates matches the file they uploaded
     fileSkel = self.addSkel(TreeType.Leaf)
     fileSkel["key"] = targetKey
     fileSkel["name"] = "pending"
     fileSkel["size"] = 0
     fileSkel["mimetype"] = "application/octetstream"
     fileSkel["dlkey"] = targetKey
     fileSkel["parentdir"] = None
     fileSkel["pendingparententry"] = db.keyHelper(
         node,
         self.addSkel(TreeType.Node).kindName) if node else None
     fileSkel["pending"] = True
     fileSkel["weak"] = True
     fileSkel["width"] = 0
     fileSkel["height"] = 0
     fileSkel.toDB()
     # Mark that entry dirty as we might never receive an add
     utils.markFileForDeletion(targetKey)
     return targetKey, uploadUrl, policy
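
To make the returned triple concrete, here is a minimal caller sketch. It assumes the policy dict already carries ready-to-use POST form fields (as bucket.generate_upload_policy returns them), that the requests library is available, and that fileModule is an instance of the module above; all of these names are illustrative, not part of the example.

import requests

def uploadLocalFile(fileModule, path, node=None):
    targetKey, uploadUrl, policy = fileModule.createUploadURL(node)
    fields = dict(policy)
    # The object name must satisfy the "starts-with $key" condition set above
    fields["key"] = "%s/source/%s" % (targetKey, path.rsplit("/", 1)[-1])
    with open(path, "rb") as fileObj:
        # GCS evaluates the form fields first; the file must be the last part of the POST
        response = requests.post(uploadUrl, data=fields, files={"file": fileObj})
    response.raise_for_status()
    return targetKey
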
Example #2
    def createUploadURL(
            self, node: Union[str, None]) -> Tuple[str, str, Dict[str, str]]:
        targetKey = utils.generateRandomString()
        conditions = [["starts-with", "$key", "%s/source/" % targetKey]]

        policy = bucket.generate_upload_policy(conditions)
        uploadUrl = "https://%s.storage.googleapis.com" % bucket.name

        # Create a corresponding file-lock object early; otherwise we would have to ensure that the
        # file-lock object the user creates matches the file they uploaded

        fileSkel = self.addLeafSkel()
        fileSkel["key"] = targetKey
        fileSkel["name"] = "pending"
        fileSkel["size"] = 0
        fileSkel["mimetype"] = "application/octetstream"
        fileSkel["dlkey"] = targetKey
        fileSkel["parentdir"] = None
        fileSkel["pendingParentdir"] = db.keyHelper(
            node,
            self.addNodeSkel().kindName) if node else None
        fileSkel["pending"] = True
        fileSkel["weak"] = True
        fileSkel["width"] = 0
        fileSkel["height"] = 0
        fileSkel[""] = ""
        fileSkel.toDB()
        # Mark that entry dirty as we might never receive an add
        utils.markFileForDeletion(targetKey)
        return targetKey, uploadUrl, policy
Example #3
		def updateInplace(relDict):
			"""
				Fetches the entity referenced by valDict["dest.key"] and updates all dest.* keys
				accordingly
			"""
			if not (isinstance(relDict, dict) and "dest" in relDict):
				logging.error("Invalid dictionary in updateInplace: %s" % relDict)
				return
			newValues = db.Get(db.keyHelper(relDict["dest"]["key"], self.kind))
			if newValues is None:
				logging.info("The key %s does not exist" % relDict["dest"]["key"])
				return
			for boneName in self.refKeys:
				if boneName != "key" and boneName in newValues:
					relDict["dest"].dbEntity[boneName] = newValues[boneName]
Example #4
    def getRootNode(self, entryKey: db.Key) -> Skeleton:
        """
		Returns the root-node for a given child.

		:param entryKey: Key (or URL-safe key) of the child entry
		:type entryKey: db.Key

		:returns: The skeleton of the root-node.
		:rtype: :class:`Skeleton`
		"""
        rootNodeSkel = self.nodeSkelCls()
        entryKey = db.keyHelper(entryKey, rootNodeSkel.kindName)
        repo = db.Get(entryKey)
        while repo and "parententry" in repo:
            repo = db.Get(repo["parententry"])
        rootNodeSkel.fromDB(repo.key)
        return rootNodeSkel
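
A small usage sketch for getRootNode, assuming tree is an instance of the tree module above and that the root-node skeleton exposes a "name" bone; both are assumptions for illustration.

import logging

def logRepository(tree, entryKey):
    # Resolve the repository (root node) the given entry belongs to and log it.
    rootNodeSkel = tree.getRootNode(entryKey)
    logging.info("Entry %s belongs to repository %s", entryKey, rootNodeSkel["name"])
    return rootNodeSkel
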
Example #5
	def createRelSkelFromKey(self, key: Union[str, db.KeyClass], rel: Union[dict, None] = None):
		"""
			Creates a relSkel instance valid for this bone from the given database key.
		"""
		key = db.keyHelper(key, self.kind)
		entity = db.Get(key)
		if not entity:
			logging.error("Key %s not found" % str(key))
			return None
		relSkel = self._refSkelCache()
		relSkel.unserialize(entity)
		for k in relSkel.keys():
			# Unserialize all bones from refKeys, then drop dbEntity - otherwise all properties will be copied
			_ = relSkel[k]
		relSkel.dbEntity = None
		return {
			"dest": relSkel,
			"rel": rel or None
		}
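
As a usage sketch, setting a relational value programmatically could look like the following; bone being the relationalBone instance and boneName the matching skeleton attribute are assumptions for illustration.

def setRelation(bone, skel, boneName, targetKey, rel=None):
    # Build the {"dest": ..., "rel": ...} structure from a raw key and store it on the skeleton.
    relValue = bone.createRelSkelFromKey(targetKey, rel)
    if relValue is None:
        return False  # the referenced entity does not exist
    skel[boneName] = relValue
    return True
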
Example #6
		def restoreSkels(key, usingData, index=None):
			refSkel, usingSkel = self._getSkels()
			isEntryFromBackup = False  # If the referenced entry has been deleted, restore information from backup
			entry = None
			dbKey = None
			errors = []
			try:
				dbKey = db.keyHelper(key, self.kind)
				entry = db.Get(dbKey)
				assert entry
			except Exception:  # Invalid key or something like that
				logging.info("Invalid reference key >%s< detected on bone '%s'",
							 key, name)
				if isinstance(oldValues, dict):
					if oldValues["dest"].key == dbKey:
						entry = oldValues["dest"]
						isEntryFromBackup = True
				elif isinstance(oldValues, list):
					for dbVal in oldValues:
						if dbVal["dest"].key == dbKey:
							entry = dbVal["dest"]
							isEntryFromBackup = True
			if isEntryFromBackup:
				refSkel = entry
			elif entry:
				refSkel.dbEntity = entry
				for k in refSkel.keys():
					# Unserialize all bones from refKeys, then drop dbEntity - otherwise all properties will be copied
					_ = refSkel[k]
				refSkel.dbEntity = None
			else:
				if index:
					errors.append(
						ReadFromClientError(ReadFromClientErrorSeverity.Invalid, "%s.%s" % (name, index),
											"Invalid value submitted"))
				else:
					errors.append(
						ReadFromClientError(ReadFromClientErrorSeverity.Invalid, name, "Invalid value submitted"))
				return None, None, errors  # We could not parse this
			if usingSkel:
				if not usingSkel.fromClient(usingData):
					errors.extend(usingSkel.errors)
			return refSkel, usingSkel, errors
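
A hypothetical call-site for restoreSkels, parsing one submitted value that may carry a key plus optional "using" data; the field names "key" and "rel" are assumptions made for this sketch.

		def parseSingleValue(rawValue):
			# Accept either a bare key or a dict carrying the key and the "using" data.
			if isinstance(rawValue, dict):
				key, usingData = rawValue.get("key"), rawValue.get("rel")
			else:
				key, usingData = rawValue, None
			refSkel, usingSkel, errors = restoreSkels(key, usingData)
			if refSkel is None:
				return None, errors  # restoreSkels already collected a ReadFromClientError
			return {"dest": refSkel, "rel": usingSkel}, errors
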
Example #7
    def initializeUpload(self,
                         fileName: str,
                         mimeType: str,
                         node: Union[str, None],
                         size: Union[int, None] = None) -> Tuple[str, str]:
        """
		Internal helper that registers a new upload. It creates the pending fileSkel entry (needed to remove any
		started uploads from GCS if that file isn't used) and a resumable (and signed) upload URL for it.
		:param fileName: Name of the file that will be uploaded
		:param mimeType: Mimetype of said file
		:param node: If set (to a string-key representation of a file-node), the upload will be written to this directory
		:param size: The *exact* filesize we're accepting, in bytes. Used by getUploadURL to enforce a filesize limit
		:return: String-key of the new file-leaf entry and the signed upload URL
		"""
        global bucket
        fileName = sanitizeFileName(fileName)
        targetKey = utils.generateRandomString()
        blob = bucket.blob("%s/source/%s" % (targetKey, fileName))
        uploadUrl = blob.create_resumable_upload_session(content_type=mimeType,
                                                         size=size,
                                                         timeout=60)
        # Create a corresponding file-lock object early; otherwise we would have to ensure that the file-lock
        # object the user creates matches the file they uploaded
        fileSkel = self.addSkel("leaf")
        fileSkel["name"] = "pending"
        fileSkel["size"] = 0
        fileSkel["mimetype"] = "application/octetstream"
        fileSkel["dlkey"] = targetKey
        fileSkel["parentdir"] = None
        fileSkel["pendingparententry"] = db.keyHelper(
            node,
            self.addSkel("node").kindName) if node else None
        fileSkel["pending"] = True
        fileSkel["weak"] = True
        fileSkel["width"] = 0
        fileSkel["height"] = 0
        fileSkel.toDB()
        # Mark that entry dirty as we might never receive an add
        utils.markFileForDeletion(targetKey)
        return db.encodeKey(fileSkel["key"]), uploadUrl
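
For completeness, a minimal client-side sketch against the resumable session URL returned above; a single PUT of the whole body is enough here, while chunked uploads with Content-Range headers are left out. The requests library is an assumption of this sketch.

import requests

def pushToUploadSession(uploadUrl, path):
    # Stream the file body into the resumable session created by initializeUpload.
    # The content type was already fixed when the session was created, so a plain PUT suffices.
    with open(path, "rb") as fileObj:
        response = requests.put(uploadUrl, data=fileObj)
    response.raise_for_status()
    return response
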
Example #8
    def deleteRecursive(self, parentKey):
        """
		Recursively processes a delete request.

		This will delete all entries which are children of *parentKey*, except the node *parentKey* itself.

		:param parentKey: URL-safe key of the node whose children should be deleted.
		:type parentKey: str
		"""
        nodeKey = db.keyHelper(parentKey, self.viewSkel("node").kindName)
        if self.leafSkelCls:
            for leaf in db.Query(self.viewSkel("leaf").kindName).filter(
                    "parententry =", nodeKey).iter():
                leafSkel = self.viewSkel("leaf")
                if not leafSkel.fromDB(leaf.key):
                    continue
                leafSkel.delete()
        for node in db.Query(self.viewSkel("node").kindName).filter(
                "parententry =", nodeKey).iter():
            self.deleteRecursive(node.key)
            nodeSkel = self.viewSkel("node")
            if not nodeSkel.fromDB(node.key):
                continue
            nodeSkel.delete()
Example #9
    def deleteRecursive(self, nodeKey):
        """
		Recursively processes a delete request.

		This will delete all entries which are children of *nodeKey*, except the node *nodeKey* itself.

		:param nodeKey: URL-safe key of the node whose children should be deleted.
		:type nodeKey: str
		"""
        nodeKey = db.keyHelper(nodeKey, self.viewSkel("node").kindName)
        if self.leafSkelCls:
            for f in db.Query(self.viewSkel("leaf").kindName).filter(
                    "parententry =", nodeKey).iter(keysOnly=True):
                s = self.viewSkel("leaf")
                if not s.fromDB(f):
                    continue
                s.delete()
        for d in db.Query(self.viewSkel("node").kindName).filter(
                "parententry =", nodeKey).iter(keysOnly=True):
            self.deleteRecursive(str(d))
            s = self.viewSkel("node")
            if not s.fromDB(d):
                continue
            s.delete()
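
deleteRecursive only removes the children of the given node, so a caller that also wants to drop the node itself might look like this sketch; tree is assumed to be an instance of the module above, and db/viewSkel are the same helpers used in the examples. Access checks and transaction handling are omitted.

def deleteNodeAndChildren(tree, nodeKey):
    # Remove the subtree first, then the node itself.
    tree.deleteRecursive(nodeKey)
    nodeSkel = tree.viewSkel("node")
    if nodeSkel.fromDB(db.keyHelper(nodeKey, nodeSkel.kindName)):
        nodeSkel.delete()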