Esempio n. 1
0
    def testSerialiseDeserialise(self):
        """ Round-trip a payload through toVortexMsg / fromVortexMsg. """
        payload = Payload(filt={"key": "PayloadResponseTest"})

        # timeout=0 makes the response fail immediately; swallow the
        # errback so the test isn't failed by an unhandled Deferred error.
        response = PayloadResponse(payload, timeout=0)
        response.addErrback(lambda _: True)

        encoded = payload.toVortexMsg()
        Payload().fromVortexMsg(encoded)
Esempio n. 2
0
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Union[Deferred, bytes]:
        """ Make Vortex Msg

        Build an encoded payload envelope holding the DocumentTuples for the
        keys requested in the tuple selector, looking each one up in the
        in-memory encoded chunk cache.
        """
        modelSetKey = tupleSelector.selector["modelSetKey"]
        keys = tupleSelector.selector["keys"]

        # Group the requested keys by the cache chunk they hash into
        keysByChunkKey = defaultdict(list)

        foundDocuments: List[DocumentTuple] = []

        for key in keys:
            keysByChunkKey[makeChunkKey(modelSetKey, key)].append(key)

        for chunkKey, subKeys in keysByChunkKey.items():
            chunk: DocDbEncodedChunk = self._cacheHandler.encodedChunk(
                chunkKey)

            # A missing chunk is logged and skipped, not treated as an error
            if not chunk:
                logger.warning("Document chunk %s is missing from cache",
                               chunkKey)
                continue

            # The chunk's payload holds one tuple: a JSON string mapping
            # document key -> packed document JSON
            docsByKeyStr = Payload().fromEncodedPayload(
                chunk.encodedData).tuples[0]
            docsByKey = json.loads(docsByKeyStr)

            for subKey in subKeys:
                if subKey not in docsByKey:
                    logger.warning(
                        "Document %s is missing from index, chunkKey %s",
                        subKey, chunkKey)
                    continue

                # Reconstruct the data
                objectProps: dict = json.loads(docsByKey[subKey])

                # Pop the document type id out of the packed properties
                thisDocumentTypeId = objectProps['_dtid']
                del objectProps['_dtid']

                # Pop the model set id out of the packed properties
                thisModelSetId = objectProps['_msid']
                del objectProps['_msid']

                # Create the new object
                newObject = DocumentTuple()
                foundDocuments.append(newObject)

                newObject.key = subKey
                newObject.modelSet = DocDbModelSet(id=thisModelSetId)
                newObject.documentType = DocDbDocumentTypeTuple(
                    id=thisDocumentTypeId)
                newObject.document = objectProps

        # Create the vortex message
        return Payload(
            filt, tuples=foundDocuments).makePayloadEnvelope().toVortexMsg()
    def process(self, payload, vortexUuid=None, userAccess=None, **kwargs):
        """ Process

        Handle a license/capabilities payload from the admin UI: if tuples
        are supplied, attempt to load them as a new license; in every case,
        send the currently active capabilities back to the caller.
        """
        from peek_server.PeekServerConfig import peekServerConfig
        from peek_server.server.auth import AuthValue
        from peek_server.storage.Setting import internalSetting, CAPABILITIES_KEY

        result = None
        from peek_server.storage import dbConn
        session = dbConn.ormSession

        # Force capabilities reload on page load
        peekServerConfig._capabilities = None

        # This is an update
        if payload.tuples:
            dataWrapTuple = payload.tuples[0]
            self._capabilities = None

            try:
                # Presumably sets self._capabilities as a side effect on
                # success — TODO confirm against AuthValue.loadCapabilities
                AuthValue.loadCapabilities(self, dataWrapTuple.data)
            except Exception as e:
                # Deliberately swallowed: a failed load leaves
                # self._capabilities as None, reported as failure below
                pass
            if self._capabilities is None:
                result = {
                    "success": False,
                    "message":
                    "The license entered is not valid for this server"
                }
                vortexSendPayload(Payload(filt=self._payloadFilter,
                                          result=result),
                                  vortexUuid=vortexUuid)
                return

            peekServerConfig._capabilities = self._capabilities

            # Persist the accepted license and report success
            internalSetting()[CAPABILITIES_KEY] = dataWrapTuple.data
            session.commit()
            result = {
                "success": True,
                "message": "The license was successfully loaded"
            }
        # Always reply with the currently active capabilities
        dataWrapTuple = DataWrapTuple()
        dataWrapTuple.data = peekServerConfig.capabilities

        vortexSendPayload(Payload(filt=self._payloadFilter,
                                  tuples=[dataWrapTuple],
                                  result=result),
                          vortexUuid=vortexUuid)

        navbarDataHandler.sendModelUpdate(vortexUuid=vortexUuid,
                                          userAccess=userAccess)
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Union[Deferred, bytes]:
        """ Make Vortex Msg

        Build an encoded payload envelope containing the location index
        entries for the keys requested in the tuple selector.
        """
        modelSetKey = tupleSelector.selector["modelSetKey"]
        keys = tupleSelector.selector["keys"]

        # Group the requested keys by the hash bucket (chunk) they live in
        subKeysByChunkKey = defaultdict(list)
        for key in keys:
            subKeysByChunkKey[dispKeyHashBucket(modelSetKey, key)].append(key)

        locationResults: List[LocationIndexTuple] = []

        for chunkKey, subKeys in subKeysByChunkKey.items():
            chunk: EncodedLocationIndexTuple = \
                self._locationCache.encodedChunk(chunkKey)

            # A missing chunk is logged and skipped, not treated as an error
            if not chunk:
                logger.warning("Location index chunk %s is missing from cache",
                               chunkKey)
                continue

            # The chunk payload holds one tuple whose jsonStr is a list of
            # [key, location..., location...] rows
            jsonStr = Payload() \
                .fromEncodedPayload(chunk.encodedLocationIndexTuple) \
                .tuples[0].jsonStr
            locationsByKey = {row[0]: row[1:] for row in json.loads(jsonStr)}

            for subKey in subKeys:
                if subKey not in locationsByKey:
                    logger.warning(
                        "LocationIndex %s is missing from index, chunkKey %s",
                        subKey, chunkKey
                    )
                    continue

                # Reconstruct each location and resolve its coord set key
                for locationJsonStr in locationsByKey[subKey]:
                    dispLocation = DispKeyLocationTuple \
                        .fromLocationJson(locationJsonStr)
                    locationResults.append(dispLocation)

                    coordSet = self._coordSetCache \
                        .coordSetForId(dispLocation.coordSetId)

                    if coordSet is None:
                        logger.warning("Can not find coordSet with ID %s",
                                       dispLocation.coordSetId)
                        continue

                    dispLocation.coordSetKey = coordSet.key

        # Create the vortex message
        return Payload(
            filt, tuples=locationResults).makePayloadEnvelope().toVortexMsg()
    def sendDeleted(self, modelSetKey: str,
                    traceConfigKeys: List[str]) -> None:
        """ Send Deleted

        Send grid updates to the client services

        NOTE(review): this body uses ``yield``, so it must be wrapped by an
        inlineCallbacks-style decorator at the definition site (not visible
        here) — the ``-> None`` annotation is then misleading; confirm.

        :param modelSetKey: The model set key
        :param traceConfigKeys: A list of object buckets that have been updated
        :returns: Nothing
        """

        if not traceConfigKeys:
            return

        # Skip the work entirely if no client service is connected
        if peekClientName not in VortexFactory.getRemoteVortexName():
            logger.debug("No clients are online to send the doc chunk to, %s",
                         traceConfigKeys)
            return

        payload = Payload(filt=copy(clientTraceConfigUpdateFromServerFilt))
        # Mark this payload as a delete notification
        payload.filt[plDeleteKey] = True
        # NOTE(review): tuples is assigned a dict here, not a list — the
        # receiving end presumably expects this shape; verify before changing
        payload.tuples = dict(modelSetKey=modelSetKey,
                              traceConfigKeys=traceConfigKeys)

        payloadEnvelope = yield payload.makePayloadEnvelopeDefer()
        vortexMsg = yield payloadEnvelope.toVortexMsgDefer()

        try:
            VortexFactory.sendVortexMsg(vortexMsg,
                                        destVortexName=peekClientName)

        except Exception as e:
            # Best effort: log the failure and carry on
            logger.exception(e)
Esempio n. 6
0
    def pushAction(self,
                   tupleAction: TupleActionABC,
                   additionalFilt=None) -> Deferred:
        """ Push Action

        This pushes the action, either locally or to the server, depending on
        the implementation.

        If pushed locally, the promise will resolve when the action has been
        saved.  If pushed directly to the server, the promise will resolve
        when the server has responded.

        :param tupleAction: The tuple action to send to the remote end
        :param additionalFilt: Any extra filt that may be required to send
                with the payload.
        """
        filt = copy(self._filt)
        if additionalFilt:
            filt.update(additionalFilt)

        deferred = Payload(filt=filt, tuples=[tupleAction]) \
            .makePayloadEnvelopeDefer()
        deferred.addCallback(
            lambda envelope: PayloadResponse(
                envelope, destVortexName=self._destVortexName))

        # The response resolves with an envelope; decode it and hand back
        # just the tuples it carries
        deferred.addCallback(
            lambda envelope: envelope.decodePayloadDefer())
        deferred.addCallback(lambda responsePayload: responsePayload.tuples)
        return deferred
    def getSegmentKeys(self, modelSetKey: str, vertexKey: str) -> List[str]:
        """ Return the segment keys indexed against a vertex key.

        Returns an empty list when the chunk or the key is absent.
        """
        chunkKey = makeChunkKeyForItemKey(modelSetKey, vertexKey)
        # noinspection PyTypeChecker
        chunk: ItemKeyIndexEncodedChunk = self.encodedChunk(chunkKey)

        if not chunk:
            logger.warning("ItemKeyIndex chunk %s is missing from cache", chunkKey)
            return []

        # The chunk payload holds a single JSON string: key -> packed JSON
        indexJsonStr = Payload().fromEncodedPayload(chunk.encodedData).tuples[0]
        packedJsonByKey = json.loads(indexJsonStr)

        if vertexKey not in packedJsonByKey:
            logger.warning(
                "ItemKey %s is missing from index, chunkKey %s",
                vertexKey, chunkKey
            )
            return []

        # The per-vertex entry is itself packed JSON of the segment keys
        return json.loads(packedJsonByKey[vertexKey])
Esempio n. 8
0
    def _delete(self, session, tuples, filtId, payloadFilt) -> PayloadEnvelope:
        """ Delete the given declaratives and return a result envelope.

        If ``tuples`` is empty, the id from the payload filter is used
        instead.
        """
        self._ext.beforeDelete(tuples, session, payloadFilt)

        if len(tuples):
            phIds = [t.id for t in tuples]
        else:
            phIds = [filtId]

        for phId in phIds:
            ph = self._getDeclarativeById(session, phId)
            try:
                # Try to iterate it
                for item in iter(ph):
                    session.delete(item)

            except TypeError:
                # If it's not an iterator
                if ph is not None:
                    session.delete(ph)

        session.commit()

        # NOTE(review): '_retreiveAll' is a typo, but it is an attribute set
        # elsewhere on this class — do not rename it in this method alone
        returnTuples: List[Tuple] = []
        if self._retreiveAll:
            returnTuples = self.createDeclarative(session, payloadFilt)

        self._ext.afterDeleteCommit(tuples, session, payloadFilt)
        return Payload(tuples=returnTuples).makePayloadEnvelope(result=True)
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Union[Deferred, bytes]:
        """ Make Vortex Msg

        Query the lookup tuples named by the tuple selector, stamp each with
        its model set key, and return them as an encoded payload envelope.
        """
        session = self._ormSessionCreator()
        try:
            Lookup = TUPLE_TYPES_BY_NAME[tupleSelector.name]

            # DispLevel hangs off a coord set; the other lookups hang
            # directly off the model set.
            # (renamed local from `all`, which shadowed the builtin)
            if Lookup == DispLevel:
                lookupTuples = (session.query(DispLevel).options(
                    joinedload(DispLevel.coordSet).joinedload(
                        ModelCoordSet.modelSet)).all())

                for item in lookupTuples:
                    item.data = {"modelSetKey": item.coordSet.modelSet.key}

            else:
                lookupTuples = (session.query(Lookup).options(
                    joinedload(Lookup.modelSet)).all())

                for item in lookupTuples:
                    item.data = {"modelSetKey": item.modelSet.key}

            return Payload(
                filt, tuples=lookupTuples).makePayloadEnvelope().toVortexMsg()

        finally:
            session.close()
def createOrUpdateBranches(self, importBranchesEncodedPayload: bytes) -> None:
    """ Convert Import Branch Tuples

    This method takes import branch tuples, and converts them to
    branch format used throughout the diagram plugin.

    (That's the packed JSON wrapped by an accessor class)

    :param importBranchesEncodedPayload: Encoded Payload whose tuples are
        the import branch tuples to load.
    :raises: re-raises via ``self.retry`` on any failure (celery retry)
    """
    # Decode importBranches payload
    importBranches: List[ImportBranchTuple] = (
        Payload().fromEncodedPayload(importBranchesEncodedPayload).tuples)

    # Validate the input importBranches
    _validateNewBranchIndexs(importBranches)

    # Do the import
    groupedBranches = _convertImportBranchTuples(importBranches)

    startTime = datetime.now(pytz.utc)

    dbSession = CeleryDbConn.getDbSession()

    engine = CeleryDbConn.getDbEngine()
    conn = engine.connect()
    transaction = conn.begin()

    try:
        for (modelSetKey, modelSetId,
             coordSetId), branches in groupedBranches.items():
            _insertOrUpdateBranches(conn, modelSetKey, modelSetId, branches)

            newDisps, dispIdsToCompile = _convertBranchDisps(branches)

            # NO TRANSACTION
            # Bulk load the Disps
            _bulkInsertDisps(engine, newDisps)

            # Queue the compiler
            DispCompilerQueueController.queueDispIdsToCompileWithSession(
                dispIdsToCompile, conn)

            # NOTE(review): committing inside the loop means earlier groups
            # stay committed even if a later group fails and the task is
            # retried — confirm this partial-commit behaviour is intended
            transaction.commit()
            dbSession.commit()

            logger.debug(
                "Completed importing %s branches for coordSetId %s in %s",
                len(branches), coordSetId,
                (datetime.now(pytz.utc) - startTime))

    except Exception as e:
        dbSession.rollback()
        transaction.rollback()
        logger.debug("Retrying createOrUpdateBranches, %s", e)
        logger.exception(e)
        raise self.retry(exc=e, countdown=3)

    finally:
        dbSession.close()
        conn.close()
def _buildIndex(chunkKeys) -> Dict[str, bytes]:
    """ Build the encoded index payloads for the given branch index chunks.

    :param chunkKeys: The chunk keys to rebuild
    :return: A map of chunkKey -> encoded Payload bytes
    """
    session = CeleryDbConn.getDbSession()

    try:
        rows = (session.query(
            BranchIndex.chunkKey,
            BranchIndex.key, BranchIndex.packedJson).filter(
                BranchIndex.chunkKey.in_(chunkKeys)).order_by(
                    BranchIndex.key).yield_per(1000).all())

        # chunkKey -> {key -> [packedJson, packedJson, ...]}
        packedJsonsByKeyByChunkKey = defaultdict(lambda: defaultdict(list))

        for row in rows:
            packedJsonsByKeyByChunkKey[row.chunkKey][row.key].append(
                row.packedJson)

        # Serialise each chunk with stable key order so identical data
        # always encodes to identical bytes
        encodedPayloadByChunkKey = {}
        for chunkKey, packedJsonsByKey in packedJsonsByKeyByChunkKey.items():
            indexJson = json.dumps(packedJsonsByKey, sort_keys=True)
            encodedPayloadByChunkKey[chunkKey] = Payload(
                tuples=indexJson).toEncodedPayload()

        return encodedPayloadByChunkKey

    finally:
        session.close()
def createOrUpdateTraceConfigs(self, traceConfigEncodedPayload: bytes
                               ) -> Dict[str, List[str]]:
    """ Import trace configs, creating model sets as required.

    :param traceConfigEncodedPayload: Encoded Payload of trace config tuples
    :return: A map of modelSetKey -> list of imported trace config keys
    :raises: re-raises via ``self.retry`` on any failure (celery retry)
    """
    # Decode arguments
    newTraceConfigs: List[GraphDbTraceConfigTuple] = (
        Payload().fromEncodedPayload(traceConfigEncodedPayload).tuples
    )

    _validateNewTraceConfigs(newTraceConfigs)

    modelSetIdByKey = _loadModelSets()

    # Do the import
    try:
        importedKeysByModelKey: Dict[str, List[str]] = defaultdict(list)

        # Bucket the trace configs by their model set key
        configsByModelKey = defaultdict(list)
        for config in newTraceConfigs:
            configsByModelKey[config.modelSetKey].append(config)
            importedKeysByModelKey[config.modelSetKey].append(config.key)

        for modelSetKey, configs in configsByModelKey.items():
            # Create the model set on first sight of its key
            modelSetId = modelSetIdByKey.get(modelSetKey)
            if modelSetId is None:
                modelSetId = _makeModelSet(modelSetKey)
                modelSetIdByKey[modelSetKey] = modelSetId

            _insertOrUpdateObjects(configs, modelSetId)

        return importedKeysByModelKey

    except Exception as e:
        logger.debug("Retrying import graphDb objects, %s", e)
        raise self.retry(exc=e, countdown=3)
Esempio n. 13
0
def createOrUpdateDocuments(self, documentsEncodedPayload: bytes) -> None:
    """ Import documents, creating model sets and lookups as required.

    :param documentsEncodedPayload: Encoded Payload of import document tuples
    :raises: re-raises via ``self.retry`` on any failure (celery retry)
    """
    startTime = datetime.now(pytz.utc)
    # Decode arguments
    newDocuments: List[ImportDocumentTuple] = (
        Payload().fromEncodedPayload(documentsEncodedPayload).tuples)

    _validateNewDocuments(newDocuments)

    modelSetIdByKey = _loadModelSets()

    # Do the import
    try:

        # Bucket the documents by their model set key
        docsByModelKey = defaultdict(list)
        for document in newDocuments:
            docsByModelKey[document.modelSetKey].append(document)

        for modelSetKey, docs in docsByModelKey.items():
            # Create the model set on first sight of its key
            modelSetId = modelSetIdByKey.get(modelSetKey)
            if modelSetId is None:
                modelSetId = _makeModelSet(modelSetKey)
                modelSetIdByKey[modelSetKey] = modelSetId

            docTypeIdsByName = _prepareLookups(docs, modelSetId)
            _insertOrUpdateObjects(docs, modelSetId, docTypeIdsByName)

        logger.info("Imported %s Documents in %s", len(newDocuments),
                    datetime.now(pytz.utc) - startTime)

    except Exception as e:
        logger.debug("Retrying import docDb objects, %s", e)
        raise self.retry(exc=e, countdown=3)
Esempio n. 14
0
def _buildIndex(chunkKeys) -> Dict[str, bytes]:
    """ Build the encoded index payloads for the given document chunks.

    :param chunkKeys: The chunk keys to rebuild
    :return: A map of chunkKey -> encoded Payload bytes
    """
    session = CeleryDbConn.getDbSession()

    try:
        rows = (session.query(
            DocDbDocument.chunkKey, DocDbDocument.key,
            DocDbDocument.documentJson).filter(
                DocDbDocument.chunkKey.in_(chunkKeys)).order_by(
                    DocDbDocument.key).yield_per(1000).all())

        # chunkKey -> {key -> documentJson}
        documentJsonByKeyByChunkKey = defaultdict(dict)

        for row in rows:
            documentJsonByKeyByChunkKey[row.chunkKey][row.key] = \
                row.documentJson

        # Serialise each chunk with stable key order so identical data
        # always encodes to identical bytes
        encodedPayloadByChunkKey = {}
        for chunkKey, documentJsonByKey in documentJsonByKeyByChunkKey.items():
            indexJson = json.dumps(documentJsonByKey, sort_keys=True)
            encodedPayloadByChunkKey[chunkKey] = Payload(
                tuples=indexJson).toEncodedPayload()

        return encodedPayloadByChunkKey

    finally:
        session.close()
Esempio n. 15
0
    def __call__(self, *args, **kwargs):
        """ Call

        Send this RPC's arguments to the listening vortex and yield the
        response (inlineCallbacks-style generator body).
        """
        yesMainThread()

        try:
            # FAKE Exception so we can raise a better stack trace later
            raise Exception()
        except:
            stack = sys.exc_info()[2]

        logger.debug("Calling RPC for %s", self.__funcName)

        # Pack the call arguments into a single RPC arg tuple
        payloadEnvelope = yield (Payload(
            filt=copy(self._filt),
            tuples=[_VortexRPCArgTuple(args=args, kwargs=kwargs)
                    ]).makePayloadEnvelopeDefer(compressionLevel=4))

        pr = PayloadResponse(payloadEnvelope,
                             timeout=self.__timeoutSeconds,
                             resultCheck=False,
                             logTimeoutError=False,
                             destVortexName=self.__listeningVortexName)

        # Delete the payload, we don't need to keep it in memory while we
        # get the result.
        del payloadEnvelope

        # The saved traceback lets the callbacks report where the RPC
        # was originally invoked from
        pr.addCallback(self._processResponseCallback, stack)
        pr.addErrback(self._processResponseErrback, stack)

        val = yield pr
        return val
Esempio n. 16
0
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Deferred:
        """ Build the logged-in-user vortex message for a device token. """
        deviceToken = tupleSelector.selector["deviceToken"]

        session = self._dbSessionCreator()
        try:
            loggedIn = session.query(UserLoggedIn) \
                .filter(UserLoggedIn.deviceToken == deviceToken) \
                .one()

            internalUser = session.query(InternalUserTuple) \
                .filter(InternalUserTuple.userName == loggedIn.userName) \
                .one()

            userDetails = UserListItemTuple(
                userId=internalUser.userName,
                displayName=internalUser.userTitle)

        except NoResultFound:
            # No user is logged in on this device
            userDetails = None

        finally:
            session.close()

        tuples = [
            UserLoggedInTuple(deviceToken=deviceToken, userDetails=userDetails)
        ]

        envelope = Payload(filt=filt, tuples=tuples).makePayloadEnvelope()
        return envelope.toVortexMsg()
Esempio n. 17
0
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Union[Deferred, bytes]:
        """ Build the task-list vortex message for a user. """
        userId = tupleSelector.selector["userId"]

        session = self._ormSessionCreator()
        try:
            tasks = (session.query(Task).filter(Task.userId == userId).options(
                subqueryload(Task.actions)).all())

            session.expunge_all()

            # Strip the payload-envelope blobs the UI has no use for
            for task in tasks:
                task.onDeliveredPayloadEnvelope = None
                task.onCompletedPayloadEnvelope = None
                task.onDeletedPayloadEnvelope = None
                task.onDialogConfirmPayloadEnvelope = None
                for action in task.actions:
                    action.onActionPayloadEnvelope = None

            # Create the vortex message
            vortexMsg = Payload(
                filt, tuples=tasks).makePayloadEnvelope().toVortexMsg()

        finally:
            session.close()

        return vortexMsg
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Union[Deferred, bytes]:
        """ Build the vortex message holding the current status tuple. """
        statusTuples = [self._statusController.status]

        envelope = yield Payload(
            filt, tuples=statusTuples).makePayloadEnvelopeDefer()
        vortexMsg = yield envelope.toVortexMsgDefer()
        return vortexMsg
def createOrUpdateSegments(self, segmentEncodedPayload: bytes) -> None:
    """ Import graph segments, creating model sets as required.

    :param segmentEncodedPayload: Encoded Payload of import segment tuples
    :raises: re-raises via ``self.retry`` on any failure (celery retry)
    """
    # Decode arguments
    newSegments: List[GraphDbImportSegmentTuple] = (
        Payload().fromEncodedPayload(segmentEncodedPayload).tuples)

    _validateNewSegments(newSegments)

    modelSetIdByKey = _loadModelSets()

    # Do the import
    try:

        # Bucket the segments by their model set key
        segmentsByModelKey = defaultdict(list)
        for segment in newSegments:
            segmentsByModelKey[segment.modelSetKey].append(segment)

        for modelSetKey, segments in segmentsByModelKey.items():
            # Create the model set on first sight of its key
            modelSetId = modelSetIdByKey.get(modelSetKey)
            if modelSetId is None:
                modelSetId = _makeModelSet(modelSetKey)
                modelSetIdByKey[modelSetKey] = modelSetId

            _insertOrUpdateObjects(segments, modelSetId, modelSetKey)

    except Exception as e:
        logger.debug("Retrying import graphDb objects, %s", e)
        raise self.retry(exc=e, countdown=3)
    def _loadInPg(cls, plpy, filt: dict, tupleSelectorStr: str):
        """ Load tuples inside PostgreSQL and return the vortex msg as text. """
        selector = TupleSelector.fromJsonStr(tupleSelectorStr)
        loadedTuples = cls.loadTuples(plpy, selector)

        envelope = Payload(filt=filt, tuples=loadedTuples).makePayloadEnvelope()
        # The PL/Python caller wants a str, not bytes
        return envelope.toVortexMsg().decode()
Esempio n. 21
0
    def _payloadEncodeDecode(self, size):
        """ Round-trip a test payload through encode/decode and compare. """
        original = makeTestPayloadA(size)
        # The date isn't part of what this check compares
        original.date = None

        decoded = Payload().fromEncodedPayload(original.toEncodedPayload())

        self.assertEqual(decoded.tuples[0], original.tuples[0])
Esempio n. 22
0
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Union[Deferred, bytes]:
        """ Build the vortex message holding the cached coord sets. """
        coordSets = self._coordSetCacheController.coordSets

        envelope = yield Payload(
            filt, tuples=coordSets).makePayloadEnvelopeDefer()
        vortexMsg = yield envelope.toVortexMsgDefer()
        return vortexMsg
Esempio n. 23
0
    def _processCallCallback(self, result, sendResponseCallable, filt):
        """ Wrap an RPC result in an envelope and send it back. """
        resultTuple = _VortexRPCResultTuple(result=result)
        payloadEnvelope = yield Payload(
            filt=filt, tuples=[resultTuple]).makePayloadEnvelopeDefer()

        vortexMsg = yield payloadEnvelope.toVortexMsgDefer()

        # RPC responses go out at RPC priority
        yield sendResponseCallable(vortexMsg, RPC_PRIORITY)
Esempio n. 24
0
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Union[Deferred, bytes]:
        """ Build the vortex message holding the cached trace configs. """
        # The UI doesn't supply the model set at present, so this may be None
        modelSetKey = tupleSelector.selector.get("modelSetKey")

        traceConfigs = self._cacheHandler.traceConfigTuples(
            modelSetKey=modelSetKey)

        # Create the vortex message
        return Payload(
            filt, tuples=traceConfigs).makePayloadEnvelope().toVortexMsg()
Esempio n. 25
0
    def testPayloadToFromEncodedPayload10mb(self):
        """ Round-trip a ~10MB payload through encode/decode. """
        original = makeTestPayloadA()
        original.tuples = ['1234567890' * 1024 ** 2]

        decoded = Payload().fromEncodedPayload(original.toEncodedPayload())

        self.assertEqual(decoded.tuples[0], original.tuples[0])
    def _getObjectsForChunkBlocking(self, chunkKey: str,
                                    objectTypeId: Optional[int],
                                    objectIds: List[int]
                                    ) -> List[SearchResultObjectTuple]:
        """ Reconstruct search result objects for the ids in one chunk.

        :param chunkKey: The cache chunk to read from
        :param objectTypeId: If set, only objects of this type are returned
        :param objectIds: The object ids to look up in the chunk
        :return: The reconstructed result objects (may be empty)
        """
        chunk = self.encodedChunk(chunkKey)
        if not chunk:
            return []

        # The chunk payload holds one JSON string: objectId -> packed props
        objectPropsByIdStr = Payload().fromEncodedPayload(chunk.encodedData).tuples[0]
        objectPropsById = ujson.loads(objectPropsByIdStr)

        foundObjects: List[SearchResultObjectTuple] = []

        for objectId in objectIds:
            # The index keys are strings, the incoming ids are ints
            if str(objectId) not in objectPropsById:
                logger.warning(
                    "Search object id %s is missing from index, chunkKey %s",
                    objectId, chunkKey
                )
                continue

            # Reconstruct the data
            objectProps: dict = ujson.loads(objectPropsById[str(objectId)])

            # Get out the object type
            thisObjectTypeId = objectProps['_otid_']
            del objectProps['_otid_']

            # If the property is set, then make sure it matches
            if objectTypeId is not None and objectTypeId != thisObjectTypeId:
                continue

            # Get out the routes
            routes: List[List[str]] = objectProps['_r_']
            del objectProps['_r_']

            # Get the key
            objectKey: str = objectProps['key']

            # Create the new object
            newObject = SearchResultObjectTuple()
            foundObjects.append(newObject)

            newObject.id = objectId
            newObject.key = objectKey
            newObject.objectType = SearchObjectTypeTuple(id=thisObjectTypeId)
            newObject.properties = objectProps

            # Each packed route is [title, path]
            for route in routes:
                newRoute = SearchResultObjectRouteTuple()
                newObject.routes.append(newRoute)

                newRoute.title = route[0]
                newRoute.path = route[1]

        return foundObjects
Esempio n. 27
0
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Union[Deferred, bytes]:
        """ Build the vortex message with the branches for the given keys. """
        modelSetKey = tupleSelector.selector["modelSetKey"]
        coordSetId = tupleSelector.selector["coordSetId"]
        keys = tupleSelector.selector["keys"]

        # Group the requested branch keys by the cache chunk they hash into
        branchKeysByChunkKey = defaultdict(list)
        for key in keys:
            branchKeysByChunkKey[makeChunkKeyForBranchIndex(modelSetKey,
                                                            key)].append(key)

        branchResults: List[BranchTuple] = []

        for chunkKey, branchKeys in branchKeysByChunkKey.items():
            chunk: BranchIndexEncodedChunk = self._cacheHandler.encodedChunk(
                chunkKey)

            # A missing chunk is logged and skipped, not treated as an error
            if not chunk:
                logger.warning("BranchIndex chunk %s is missing from cache",
                               chunkKey)
                continue

            # The chunk payload holds one JSON string: key -> packed JSONs
            indexJsonStr = Payload().fromEncodedPayload(
                chunk.encodedData).tuples[0]
            packedJsonsByKey = json.loads(indexJsonStr)

            for branchKey in branchKeys:
                if branchKey not in packedJsonsByKey:
                    logger.warning(
                        "Branch %s is missing from index, chunkKey %s",
                        branchKey, chunkKey)
                    continue

                for packedJson in packedJsonsByKey[branchKey]:
                    branch = BranchTuple.loadFromJson(packedJson, None, None)
                    # A None coordSetId in the selector matches every branch
                    if coordSetId is None or branch.coordSetId == coordSetId:
                        branchResults.append(branch)

        # Create the vortex message
        return Payload(
            filt, tuples=branchResults).makePayloadEnvelope().toVortexMsg()
Esempio n. 28
0
    def testSuccesCb(self):
        """ The response resolves with the very payload that was processed. """
        payload = Payload(filt={"key": "PayloadResponseTest"})

        response = PayloadResponse(payload)
        response.addCallback(lambda pl: self.assertIs(pl, payload))
        # Any errback means the test has failed
        response.addErrback(lambda pl: self.assertTrue(False))

        PayloadIO().process(payload, "", "", None, lambda _: None)

        return response
Esempio n. 29
0
        def sendChunk(toSend):
            """ Encode one batch of tuples and send it to the requester.

            NOTE(review): ``filt``, ``cacheAll``, ``sendResponse`` and
            ``logger`` are closed over from the enclosing scope, which is
            not visible here.
            """
            # An empty batch is only sent when the whole cache was requested
            if not toSend and not cacheAll:
                return

            payload = Payload(filt=filt, tuples=toSend)
            d: Deferred = payload.makePayloadEnvelopeDefer(compressionLevel=2)
            d.addCallback(
                lambda payloadEnvelope: payloadEnvelope.toVortexMsgDefer())
            d.addCallback(sendResponse)
            # Log and consume send failures rather than propagating them
            d.addErrback(vortexLogFailure, logger, consumeError=True)
Esempio n. 30
0
    def testTimeoutErrback(self):
        """ A PayloadResponse that never receives a reply must time out. """
        payload = Payload(filt={"key": "PayloadResponseTest"})

        response = PayloadResponse(payload, timeout=3)
        # The success path must never run; no response is ever processed
        response.addCallback(lambda pl: self.assertFalse(True))
        self.assertFailure(response, defer.TimeoutError)

        return response