Exemplo n.º 1
0
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Deferred:
        """ Build a vortex message describing the user logged in on a device.

        :param filt: The filt to attach to the outgoing payload.
        :param tupleSelector: Selector carrying the "deviceToken" key.
        :return: The serialised vortex message.
        """
        token = tupleSelector.selector["deviceToken"]

        ormSession = self._dbSessionCreator()
        try:
            loggedIn = (ormSession.query(UserLoggedIn)
                        .filter(UserLoggedIn.deviceToken == token)
                        .one())

            internalUser = (ormSession.query(InternalUserTuple)
                            .filter(InternalUserTuple.userName == loggedIn.userName)
                            .one())

            details = UserListItemTuple(userId=internalUser.userName,
                                        displayName=internalUser.userTitle)

        except NoResultFound:
            # No user is logged in on this device token
            details = None

        finally:
            ormSession.close()

        envelope = Payload(
            filt=filt,
            tuples=[UserLoggedInTuple(deviceToken=token,
                                      userDetails=details)]
        ).makePayloadEnvelope()
        return envelope.toVortexMsg()
    def sendDeleted(self, modelSetKey: str,
                    traceConfigKeys: List[str]) -> None:
        """ Send Deleted

        Notify the client services that trace configs have been deleted.

        NOTE(review): this method uses ``yield``, so it must be wrapped with
        ``@inlineCallbacks`` at the definition site and actually returns a
        generator/Deferred despite the ``-> None`` annotation — confirm.

        :param modelSetKey: The model set key
        :param traceConfigKeys: The keys of the deleted trace configs
        :returns: Nothing
        """

        # Nothing to send if no trace configs were deleted
        if not traceConfigKeys:
            return

        # Don't bother serialising if no client service is connected
        if peekClientName not in VortexFactory.getRemoteVortexName():
            logger.debug("No clients are online to send the doc chunk to, %s",
                         traceConfigKeys)
            return

        # Copy the base filt and mark this payload as a delete notification
        payload = Payload(filt=copy(clientTraceConfigUpdateFromServerFilt))
        payload.filt[plDeleteKey] = True
        payload.tuples = dict(modelSetKey=modelSetKey,
                              traceConfigKeys=traceConfigKeys)

        # Serialise with the deferred variants so the reactor isn't blocked
        payloadEnvelope = yield payload.makePayloadEnvelopeDefer()
        vortexMsg = yield payloadEnvelope.toVortexMsgDefer()

        try:
            VortexFactory.sendVortexMsg(vortexMsg,
                                        destVortexName=peekClientName)

        except Exception as e:
            # Best effort — log and continue if the send fails
            logger.exception(e)
    def _loadInPg(cls, plpy, filt: dict, tupleSelectorStr: str):
        """ Load tuples inside PostgreSQL (via plpy) and serialise them.

        :param plpy: The PostgreSQL procedural-language interface.
        :param filt: The filt to attach to the outgoing payload.
        :param tupleSelectorStr: The tuple selector, as a JSON string.
        :return: The vortex message decoded to a str (for plpy's benefit).
        """
        selector = TupleSelector.fromJsonStr(tupleSelectorStr)
        loadedTuples = cls.loadTuples(plpy, selector)

        envelope = Payload(filt=filt, tuples=loadedTuples).makePayloadEnvelope()
        return envelope.toVortexMsg().decode()
Exemplo n.º 4
0
    def testSerialiseDeserialise(self):
        """ Round-trip a payload through the vortex message encoding. """
        payload = Payload(filt={"key": "PayloadResponseTest"})

        # The response times out immediately; swallow the resulting errback
        response = PayloadResponse(payload, timeout=0)
        response.addErrback(lambda _: True)

        encoded = payload.toVortexMsg()
        Payload().fromVortexMsg(encoded)
Exemplo n.º 5
0
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Union[Deferred, bytes]:
        """ Make Vortex Msg

        Look up the requested documents in the encoded chunk cache and
        serialise them into a vortex message.

        :param filt: The filt to attach to the outgoing payload.
        :param tupleSelector: Selector with "modelSetKey" and "keys".
        :return: The serialised vortex message bytes.
        """
        modelSetKey = tupleSelector.selector["modelSetKey"]
        keys = tupleSelector.selector["keys"]

        # Group the requested keys by the cache chunk they live in
        keysByChunkKey = defaultdict(list)
        for key in keys:
            keysByChunkKey[makeChunkKey(modelSetKey, key)].append(key)

        foundDocuments: List[DocumentTuple] = []

        for chunkKey, subKeys in keysByChunkKey.items():
            chunk: DocDbEncodedChunk = self._cacheHandler.encodedChunk(
                chunkKey)

            if not chunk:
                logger.warning("Document chunk %s is missing from cache",
                               chunkKey)
                continue

            # The chunk payload holds a JSON string of {key: packedDocJson}
            docsByKeyStr = Payload().fromEncodedPayload(
                chunk.encodedData).tuples[0]
            docsByKey = json.loads(docsByKeyStr)

            for subKey in subKeys:
                if subKey not in docsByKey:
                    logger.warning(
                        "Document %s is missing from index, chunkKey %s",
                        subKey, chunkKey)
                    continue

                # Reconstruct the data.
                # (Annotation was ": {}" — a dict literal, not a type.)
                objectProps: dict = json.loads(docsByKey[subKey])

                # Pop out the document type id
                thisDocumentTypeId = objectProps.pop('_dtid')

                # Pop out the model set id
                thisModelSetId = objectProps.pop('_msid')

                # Create the new object
                newObject = DocumentTuple()
                foundDocuments.append(newObject)

                newObject.key = subKey
                newObject.modelSet = DocDbModelSet(id=thisModelSetId)
                newObject.documentType = DocDbDocumentTypeTuple(
                    id=thisDocumentTypeId)
                newObject.document = objectProps

        # Create the vortex message
        return Payload(
            filt, tuples=foundDocuments).makePayloadEnvelope().toVortexMsg()
Exemplo n.º 6
0
        def sendChunk(toSend):
            # Send one batch of tuples back to the requester.
            # `filt`, `cacheAll` and `sendResponse` are captured from the
            # enclosing scope.  When cacheAll is set, an empty batch is still
            # sent — presumably so the receiver knows the transfer finished;
            # confirm against the receiving end.
            if not toSend and not cacheAll:
                return

            payload = Payload(filt=filt, tuples=toSend)
            # Serialise off-thread, then hand the message to sendResponse
            d: Deferred = payload.makePayloadEnvelopeDefer(compressionLevel=2)
            d.addCallback(
                lambda payloadEnvelope: payloadEnvelope.toVortexMsgDefer())
            d.addCallback(sendResponse)
            # Log and consume any failure in the chain
            d.addErrback(vortexLogFailure, logger, consumeError=True)
Exemplo n.º 7
0
 def makeVortexMsg(self, filt: dict,
                   tupleSelector: TupleSelector) -> Union[Deferred, bytes]:
     """ Make Vortex Msg

     Build a chunkKey -> lastUpdate map from the encoded chunk cache and
     serialise it into a vortex message.

     :param filt: The filt to attach to the outgoing payload.
     :param tupleSelector: The selector for this observable (unused here).
     :return: The serialised vortex message.
     """
     tuple_ = DocumentUpdateDateTuple()
     tuple_.updateDateByChunkKey = {
         key: self._cacheHandler.encodedChunk(key).lastUpdate
         for key in self._cacheHandler.encodedChunkKeys()
     }
     payload = Payload(filt, tuples=[tuple_])
     payloadEnvelope = yield payload.makePayloadEnvelopeDefer()
     # Use the deferred variant so serialisation doesn't block the reactor,
     # consistent with makePayloadEnvelopeDefer above (was toVortexMsg()).
     vortexMsg = yield payloadEnvelope.toVortexMsgDefer()
     return vortexMsg
Exemplo n.º 8
0
    def makeVortexMsg(self, filt: dict, tupleSelector: TupleSelector) -> Deferred:
        """ Serialise all event DB model sets into a vortex message. """
        ormSession = self._dbSessionCreator()
        try:
            modelSets = ormSession.query(EventDBModelSetTable).all()
        finally:
            ormSession.close()

        envelope = Payload(filt=filt, tuples=modelSets).makePayloadEnvelope()
        return envelope.toVortexMsg()
Exemplo n.º 9
0
 def _retrieve(self,
               session,
               filtId,
               payloadFilt,
               obj=None,
               **kwargs) -> PayloadEnvelope:
     """ Fetch a single declarative by id (or use *obj* when supplied) and
     wrap it in a payload envelope.

     :param session: The ORM session to query with.
     :param filtId: The id of the declarative to fetch.
     :param payloadFilt: The filt from the request, passed to the extension.
     :param obj: Optional pre-fetched object, skips the lookup.
     """
     declarative = obj if obj else self._getDeclarativeById(session, filtId)
     payload = Payload()
     payload.tuples = [declarative] if declarative else []
     # Give the extension a chance to post-process the retrieved tuples
     self._ext.afterRetrieve(payload.tuples, session, payloadFilt)
     return payload.makePayloadEnvelope()
Exemplo n.º 10
0
    def pushAction(self,
                   tupleAction: TupleActionABC,
                   additionalFilt=None) -> Deferred:
        """ Push Action

        This pushes the action, either locally or to the server, depending on the
        implementation.

        If pushed locally, the promise will resolve when the action has been saved.
        If pushed directly to the server, the promise will resolve when the server has
        responded.

        :param tupleAction: The tuple action to send to the remote end
        :param additionalFilt: Any extra filt that may be required to send with the
                payload.
        :return: A Deferred firing with the tuples decoded from the response.
        """
        # Copy so the instance's base filt isn't mutated by the update below
        filt = copy(self._filt)
        if additionalFilt:
            filt.update(additionalFilt)

        d = Payload(filt=filt, tuples=[tupleAction]).makePayloadEnvelopeDefer()
        # PayloadResponse presumably sends the envelope and resolves with the
        # reply envelope — confirm against the PayloadResponse docs.
        d.addCallback(lambda payloadEnvelope: PayloadResponse(
            payloadEnvelope, destVortexName=self._destVortexName))

        # Convert the data to TupleAction
        d.addCallback(
            lambda payloadEnvelope: payloadEnvelope.decodePayloadDefer())
        d.addCallback(lambda payload: payload.tuples)
        return d
    def process(self, payload, vortexUuid=None, userAccess=None, **kwargs):
        """ Handle a license/capabilities payload from the admin UI.

        If the payload carries tuples it is treated as a license update
        attempt; the current capabilities are always sent back afterwards.
        """
        from peek_server.PeekServerConfig import peekServerConfig
        from peek_server.server.auth import AuthValue
        from peek_server.storage.Setting import internalSetting, CAPABILITIES_KEY

        result = None
        from peek_server.storage import dbConn
        session = dbConn.ormSession

        # Force capabilities reload on page load
        peekServerConfig._capabilities = None

        # This is an update
        if payload.tuples:
            dataWrapTuple = payload.tuples[0]
            self._capabilities = None

            try:
                # Parses/validates the license data; on success this is
                # expected to populate self._capabilities — confirm against
                # AuthValue.loadCapabilities.
                AuthValue.loadCapabilities(self, dataWrapTuple.data)
            except Exception as e:
                # Best effort: a bad license leaves self._capabilities as
                # None, which is reported to the user below.
                pass
            if self._capabilities is None:
                # The license failed to load — reply with the error and stop
                result = {
                    "success": False,
                    "message":
                    "The license entered is not valid for this server"
                }
                vortexSendPayload(Payload(filt=self._payloadFilter,
                                          result=result),
                                  vortexUuid=vortexUuid)
                return

            peekServerConfig._capabilities = self._capabilities

            # Persist the accepted license
            internalSetting()[CAPABILITIES_KEY] = dataWrapTuple.data
            session.commit()
            result = {
                "success": True,
                "message": "The license was successfully loaded"
            }
        # Always reply with the current capabilities (update or plain read)
        dataWrapTuple = DataWrapTuple()
        dataWrapTuple.data = peekServerConfig.capabilities

        vortexSendPayload(Payload(filt=self._payloadFilter,
                                  tuples=[dataWrapTuple],
                                  result=result),
                          vortexUuid=vortexUuid)

        # The navbar reflects license state; push it an update too
        navbarDataHandler.sendModelUpdate(vortexUuid=vortexUuid,
                                          userAccess=userAccess)
Exemplo n.º 12
0
    def _callback(self, result, replyFilt: dict, tupleName: str,
                  sendResponse: SendVortexMsgResponseCallable):
        """ Serialise the handler result and send it back to the requester.

        :param result: The handler's result, a tuple or list of tuples.
        :param replyFilt: The filt to attach to the reply payload.
        :param tupleName: The tuple name, used for error logging only.
        :param sendResponse: Callable that delivers the vortex message.
        """
        # Normalise a single result into a list of tuples
        tuples = result if isinstance(result, list) else [result]
        payload = Payload(filt=replyFilt, tuples=tuples)

        payloadEnvelope = yield payload.makePayloadEnvelopeDefer()
        vortexMsg = yield payloadEnvelope.toVortexMsgDefer()

        try:
            yield sendResponse(vortexMsg)

        except Exception as e:
            logger.error("Failed to send TupleAction response for %s", tupleName)
            logger.exception(e)
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Union[Deferred, bytes]:
        """ Resolve display-key locations from the cached location index and
        serialise them into a vortex message.

        :param filt: The filt to attach to the outgoing payload.
        :param tupleSelector: Selector with "modelSetKey" and "keys".
        """
        modelSetKey = tupleSelector.selector["modelSetKey"]
        dispKeys = tupleSelector.selector["keys"]

        # Group the requested keys by the index chunk they hash into
        keysByChunkKey = defaultdict(list)
        for dispKey in dispKeys:
            keysByChunkKey[dispKeyHashBucket(modelSetKey, dispKey)].append(dispKey)

        locations: List[LocationIndexTuple] = []

        for chunkKey, chunkDispKeys in keysByChunkKey.items():
            chunk: EncodedLocationIndexTuple = \
                self._locationCache.encodedChunk(chunkKey)

            if not chunk:
                logger.warning("Location index chunk %s is missing from cache",
                               chunkKey)
                continue

            # Each index row is [key, location, location, ...]
            jsonStr = Payload() \
                .fromEncodedPayload(chunk.encodedLocationIndexTuple) \
                .tuples[0].jsonStr
            locationsByKey = {row[0]: row[1:] for row in json.loads(jsonStr)}

            for dispKey in chunkDispKeys:
                if dispKey not in locationsByKey:
                    logger.warning(
                        "LocationIndex %s is missing from index, chunkKey %s",
                        dispKey, chunkKey
                    )
                    continue

                # Reconstruct the location tuples for this key
                for locationJsonStr in locationsByKey[dispKey]:
                    location = DispKeyLocationTuple.fromLocationJson(locationJsonStr)
                    locations.append(location)

                    # Populate the coord set key
                    coordSet = self._coordSetCache.coordSetForId(location.coordSetId)

                    if coordSet is None:
                        logger.warning("Can not find coordSet with ID %s",
                                       location.coordSetId)
                        continue

                    location.coordSetKey = coordSet.key

        # Create the vortex message
        return Payload(filt,
                       tuples=locations).makePayloadEnvelope().toVortexMsg()
Exemplo n.º 14
0
def _buildIndex(chunkKeys) -> Dict[str, bytes]:
    """ Build the encoded document index payload for each chunk key.

    :param chunkKeys: The chunk keys to (re)build.
    :return: A map of chunkKey -> encoded payload bytes.
    """
    session = CeleryDbConn.getDbSession()

    try:
        rows = (session.query(
            DocDbDocument.chunkKey, DocDbDocument.key,
            DocDbDocument.documentJson)
            .filter(DocDbDocument.chunkKey.in_(chunkKeys))
            .order_by(DocDbDocument.key)
            .yield_per(1000)
            .all())

        # chunkKey -> {documentKey -> documentJson}
        jsonByKeyByChunkKey = defaultdict(dict)
        for row in rows:
            jsonByKeyByChunkKey[row.chunkKey][row.key] = row.documentJson

        # Serialise each chunk's index; keys are sorted so the encoded
        # blob is deterministic for a given set of documents
        return {
            chunkKey: Payload(
                tuples=json.dumps(jsonByKey, sort_keys=True)
            ).toEncodedPayload()
            for chunkKey, jsonByKey in jsonByKeyByChunkKey.items()
        }

    finally:
        session.close()
Exemplo n.º 15
0
def createOrUpdateDocuments(self, documentsEncodedPayload: bytes) -> None:
    """ Create Or Update Documents

    Import (insert or update) DocDb documents from an encoded payload,
    creating model sets on demand.  On failure the whole import is retried
    via the task framework (``self.retry``).

    :param documentsEncodedPayload: Encoded Payload whose tuples are
            ImportDocumentTuple instances.
    :returns: Nothing
    """
    startTime = datetime.now(pytz.utc)
    # Decode arguments
    newDocuments: List[ImportDocumentTuple] = (
        Payload().fromEncodedPayload(documentsEncodedPayload).tuples)

    _validateNewDocuments(newDocuments)

    modelSetIdByKey = _loadModelSets()

    # Do the import
    try:

        # Group the documents by their model set key
        documentByModelKey = defaultdict(list)
        for doc in newDocuments:
            documentByModelKey[doc.modelSetKey].append(doc)

        for modelSetKey, docs in documentByModelKey.items():
            # Create the model set on first sight of its key
            modelSetId = modelSetIdByKey.get(modelSetKey)
            if modelSetId is None:
                modelSetId = _makeModelSet(modelSetKey)
                modelSetIdByKey[modelSetKey] = modelSetId

            docTypeIdsByName = _prepareLookups(docs, modelSetId)
            _insertOrUpdateObjects(docs, modelSetId, docTypeIdsByName)

        logger.info("Imported %s Documents in %s", len(newDocuments),
                    datetime.now(pytz.utc) - startTime)

    except Exception as e:
        # Back off and retry the whole import
        logger.debug("Retrying import docDb objects, %s", e)
        raise self.retry(exc=e, countdown=3)
Exemplo n.º 16
0
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Union[Deferred, bytes]:
        """ Serialise a user's tasks, stripping the server-side payload
        envelopes before sending them to the UI.

        :param filt: The filt to attach to the outgoing payload.
        :param tupleSelector: Selector carrying the "userId" key.
        """
        userId = tupleSelector.selector["userId"]

        ormSession = self._ormSessionCreator()
        try:
            tasks = (ormSession.query(Task)
                     .filter(Task.userId == userId)
                     .options(subqueryload(Task.actions))
                     .all())

            # Detach from the session so the tuples can be mutated safely
            ormSession.expunge_all()

            # Remove the data we don't want in the UI
            for task in tasks:
                task.onDeliveredPayloadEnvelope = None
                task.onCompletedPayloadEnvelope = None
                task.onDeletedPayloadEnvelope = None
                task.onDialogConfirmPayloadEnvelope = None
                for action in task.actions:
                    action.onActionPayloadEnvelope = None

            # Create the vortex message
            vortexMsg = Payload(
                filt, tuples=tasks).makePayloadEnvelope().toVortexMsg()

        finally:
            ormSession.close()

        return vortexMsg
def createOrUpdateSegments(self, segmentEncodedPayload: bytes) -> None:
    """ Import (insert or update) graph DB segments from an encoded payload,
    creating model sets on demand.

    :param segmentEncodedPayload: Encoded Payload whose tuples are
            GraphDbImportSegmentTuple instances.
    :returns: Nothing
    """
    # Decode arguments
    newSegments: List[GraphDbImportSegmentTuple] = (
        Payload().fromEncodedPayload(segmentEncodedPayload).tuples)

    _validateNewSegments(newSegments)

    modelSetIdByKey = _loadModelSets()

    # Do the import
    try:
        # Group the segments by their model set key
        segmentsByModelKey = defaultdict(list)
        for segment in newSegments:
            segmentsByModelKey[segment.modelSetKey].append(segment)

        for modelSetKey, segments in segmentsByModelKey.items():
            # Create the model set on first sight of its key
            modelSetId = modelSetIdByKey.get(modelSetKey)
            if modelSetId is None:
                modelSetId = _makeModelSet(modelSetKey)
                modelSetIdByKey[modelSetKey] = modelSetId

            _insertOrUpdateObjects(segments, modelSetId, modelSetKey)

    except Exception as e:
        # Back off and retry the whole import
        logger.debug("Retrying import graphDb objects, %s", e)
        raise self.retry(exc=e, countdown=3)
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Deferred:
        """ Serialise the list of mobile users into a vortex message. """
        # TODO HACK - Hard coded for mobile users
        mobileGroup = yield self._mobileGroup()
        users = yield self._ourApi.infoApi.users(groupNames=[mobileGroup])

        # Convert each user's details into a list item tuple
        listItems = [
            UserListItemTuple(userId=user.userName,
                              displayName=user.userTitle)
            for user in users
        ]

        payload = Payload(filt=filt, tuples=listItems)
        payloadEnvelope = yield payload.makePayloadEnvelopeDefer()
        vortexMsg = yield payloadEnvelope.toVortexMsgDefer()
        return vortexMsg
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Union[Deferred, bytes]:
        """ Serialise every row of the requested lookup table.

        DispLevel rows resolve their model set key via the coord set; all
        other lookups reference the model set directly.

        :param filt: The filt to attach to the outgoing payload.
        :param tupleSelector: Its name selects the lookup type to load.
        :return: The serialised vortex message bytes.
        """
        session = self._ormSessionCreator()
        try:
            Lookup = TUPLE_TYPES_BY_NAME[tupleSelector.name]

            # `lookupItems` was named `all`, which shadowed the builtin
            if Lookup == DispLevel:
                lookupItems = (session.query(DispLevel).options(
                    joinedload(DispLevel.coordSet).joinedload(
                        ModelCoordSet.modelSet)).all())

                for item in lookupItems:
                    item.data = {"modelSetKey": item.coordSet.modelSet.key}

            else:
                lookupItems = (session.query(Lookup).options(
                    joinedload(Lookup.modelSet)).all())

                for item in lookupItems:
                    item.data = {"modelSetKey": item.modelSet.key}

            return Payload(filt,
                           tuples=lookupItems).makePayloadEnvelope().toVortexMsg()

        finally:
            session.close()
Exemplo n.º 20
0
    def _delete(self, session, tuples, filtId, payloadFilt) -> PayloadEnvelope:
        """ Delete the given tuples (or the declarative matching filtId) and
        return a payload envelope confirming the delete.

        :param session: The ORM session to delete with.
        :param tuples: The tuples to delete; falls back to filtId when empty.
        :param filtId: The id to delete when no tuples are supplied.
        :param payloadFilt: The filt from the request, passed to extensions.
        """
        self._ext.beforeDelete(tuples, session, payloadFilt)

        if len(tuples):
            phIds = [t.id for t in tuples]
        else:
            phIds = [filtId]

        for phId in phIds:
            ph = self._getDeclarativeById(session, phId)
            try:
                # Try to iterate it
                # _getDeclarativeById may return either one declarative or an
                # iterable of them; iter() raises TypeError for the former,
                # which the except below handles.
                for item in iter(ph):
                    session.delete(item)

            except TypeError:
                # If it's not an iterator
                if ph is not None:
                    session.delete(ph)

        session.commit()

        # Optionally send back the remaining tuples after the delete
        returnTuples: List[Tuple] = []
        if self._retreiveAll:
            returnTuples = self.createDeclarative(session, payloadFilt)

        self._ext.afterDeleteCommit(tuples, session, payloadFilt)
        return Payload(tuples=returnTuples).makePayloadEnvelope(result=True)
def createOrUpdateBranches(self, importBranchesEncodedPayload: bytes) -> None:
    """ Convert Import Branch Tuples

    This method takes import branch tuples, and converts them to
    branch format used throughout the diagram plugin.

    (Thats the packed JSON wrapped by an accessor class)

    :param importBranchesEncodedPayload: Encoded Payload whose tuples are
            ImportBranchTuple instances.
    """
    # Decode importBranches payload
    importBranches: List[ImportBranchTuple] = (
        Payload().fromEncodedPayload(importBranchesEncodedPayload).tuples)

    # Validate the input importBranches
    _validateNewBranchIndexs(importBranches)

    # Do the import
    groupedBranches = _convertImportBranchTuples(importBranches)

    startTime = datetime.now(pytz.utc)

    dbSession = CeleryDbConn.getDbSession()

    # A raw core connection is used alongside the ORM session
    engine = CeleryDbConn.getDbEngine()
    conn = engine.connect()
    transaction = conn.begin()

    try:
        for (modelSetKey, modelSetId,
             coordSetId), branches in groupedBranches.items():
            _insertOrUpdateBranches(conn, modelSetKey, modelSetId, branches)

            newDisps, dispIdsToCompile = _convertBranchDisps(branches)

            # NO TRANSACTION
            # Bulk load the Disps
            _bulkInsertDisps(engine, newDisps)

            # Queue the compiler
            DispCompilerQueueController.queueDispIdsToCompileWithSession(
                dispIdsToCompile, conn)

            # NOTE(review): `transaction` is begun once before the loop but
            # committed on every iteration; committing the same transaction
            # object twice may fail.  Confirm groupedBranches only ever holds
            # one group, or begin a fresh transaction per iteration.
            transaction.commit()
            dbSession.commit()

            logger.debug(
                "Completed importing %s branches for coordSetId %s in %s",
                len(branches), coordSetId,
                (datetime.now(pytz.utc) - startTime))

    except Exception as e:
        # Roll back both the core transaction and the ORM session, then retry
        dbSession.rollback()
        transaction.rollback()
        logger.debug("Retrying createOrUpdateBranches, %s", e)
        logger.exception(e)
        raise self.retry(exc=e, countdown=3)

    finally:
        dbSession.close()
        conn.close()
def createOrUpdateTraceConfigs(self, traceConfigEncodedPayload: bytes
                               ) -> Dict[str, List[str]]:
    """ Import trace configs, creating model sets on demand.

    :param traceConfigEncodedPayload: Encoded Payload whose tuples are
            GraphDbTraceConfigTuple instances.
    :return: A map of modelSetKey -> keys of the imported trace configs.
    """
    # Decode arguments
    newTraceConfigs: List[GraphDbTraceConfigTuple] = (
        Payload().fromEncodedPayload(traceConfigEncodedPayload).tuples
    )

    _validateNewTraceConfigs(newTraceConfigs)

    modelSetIdByKey = _loadModelSets()

    # Do the import
    try:
        insertedOrCreated: Dict[str, List[str]] = defaultdict(list)

        # Group the configs by model set, recording their keys as we go
        configsByModelKey = defaultdict(list)
        for config in newTraceConfigs:
            configsByModelKey[config.modelSetKey].append(config)
            insertedOrCreated[config.modelSetKey].append(config.key)

        for modelSetKey, configs in configsByModelKey.items():
            # Create the model set on first sight of its key
            modelSetId = modelSetIdByKey.get(modelSetKey)
            if modelSetId is None:
                modelSetId = _makeModelSet(modelSetKey)
                modelSetIdByKey[modelSetKey] = modelSetId

            _insertOrUpdateObjects(configs, modelSetId)

        return insertedOrCreated

    except Exception as e:
        # Back off and retry the whole import
        logger.debug("Retrying import graphDb objects, %s", e)
        raise self.retry(exc=e, countdown=3)
    def getSegmentKeys(self, modelSetKey: str, vertexKey: str) -> List[str]:
        """ Return the keys of the segments containing the given vertex.

        :param modelSetKey: The model set to look in.
        :param vertexKey: The vertex key to resolve.
        :return: The segment keys, or [] when the vertex isn't indexed.
        """
        chunkKey = makeChunkKeyForItemKey(modelSetKey, vertexKey)
        # noinspection PyTypeChecker
        chunk: ItemKeyIndexEncodedChunk = self.encodedChunk(chunkKey)

        if not chunk:
            logger.warning("ItemKeyIndex chunk %s is missing from cache", chunkKey)
            return []

        # The chunk stores a JSON string of {itemKey: packedSegmentKeysJson}
        indexJsonStr = Payload().fromEncodedPayload(chunk.encodedData).tuples[0]
        indexByKey = json.loads(indexJsonStr)

        if vertexKey not in indexByKey:
            logger.warning(
                "ItemKey %s is missing from index, chunkKey %s",
                vertexKey, chunkKey
            )
            return []

        # The entry itself is packed JSON of the segment key list
        return json.loads(indexByKey[vertexKey])
Exemplo n.º 24
0
    def __call__(self, *args, **kwargs):
        """ Call

        Invoke the remote procedure: serialise args/kwargs into a payload,
        send it to the listening vortex, and return (via a Deferred) the
        remote result.
        """
        yesMainThread()

        try:
            # FAKE Exception so we can raise a better stack trace later
            raise Exception()
        except:
            stack = sys.exc_info()[2]

        logger.debug("Calling RPC for %s", self.__funcName)

        # Pack the call arguments into a single RPC arg tuple
        payloadEnvelope = yield (Payload(
            filt=copy(self._filt),
            tuples=[_VortexRPCArgTuple(args=args, kwargs=kwargs)
                    ]).makePayloadEnvelopeDefer(compressionLevel=4))

        # PayloadResponse presumably sends the envelope and resolves with
        # the reply — confirm against the PayloadResponse docs.
        pr = PayloadResponse(payloadEnvelope,
                             timeout=self.__timeoutSeconds,
                             resultCheck=False,
                             logTimeoutError=False,
                             destVortexName=self.__listeningVortexName)

        # Delete the payload, we don't need to keep it in memory while we
        # get the result.
        del payloadEnvelope

        # The captured traceback lets errors report the caller's stack
        pr.addCallback(self._processResponseCallback, stack)
        pr.addErrback(self._processResponseErrback, stack)

        val = yield pr
        return val
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Union[Deferred, bytes]:
        """ Serialise the controller's current status tuple. """
        statusTuples = [self._statusController.status]

        envelope = yield Payload(
            filt, tuples=statusTuples).makePayloadEnvelopeDefer()
        vortexMsg = yield envelope.toVortexMsgDefer()
        return vortexMsg
def _buildIndex(chunkKeys) -> Dict[str, bytes]:
    """ Build the encoded branch index payload for each chunk key.

    :param chunkKeys: The chunk keys to (re)build.
    :return: A map of chunkKey -> encoded payload bytes.
    """
    session = CeleryDbConn.getDbSession()

    try:
        indexQry = (session.query(
            BranchIndex.chunkKey,
            BranchIndex.key, BranchIndex.packedJson).filter(
                BranchIndex.chunkKey.in_(chunkKeys)).order_by(
                    BranchIndex.key).yield_per(1000).all())

        # Create the ChunkKey -> {key -> [packedJson, packedJson, ...]}
        packagedJsonsByObjKeyByChunkKey = defaultdict(
            lambda: defaultdict(list))

        for item in indexQry:
            packagedJsonsByObjKeyByChunkKey[item.chunkKey][item.key].append(
                item.packedJson)

        encPayloadByChunkKey = {}

        # Sort each bucket by the key
        for chunkKey, packedJsonsByKey in packagedJsonsByObjKeyByChunkKey.items(
        ):
            # sort_keys makes the encoded blob deterministic per chunk
            tuples = json.dumps(packedJsonsByKey, sort_keys=True)

            # Create the blob data for this index.
            # It will be index-blueprint by a binary sort
            encPayloadByChunkKey[chunkKey] = Payload(
                tuples=tuples).toEncodedPayload()

        return encPayloadByChunkKey

    finally:
        session.close()
Exemplo n.º 27
0
    def _payloadEncodeDecode(self, size):
        """ Round-trip a test payload of the given size through the encoded
        payload format and check the tuples survive. """
        original = makeTestPayloadA(size)
        # Clear the timestamp, presumably to keep the round-trip
        # comparison deterministic
        original.date = None

        decoded = Payload().fromEncodedPayload(original.toEncodedPayload())

        self.assertEqual(decoded.tuples[0], original.tuples[0])
Exemplo n.º 28
0
    def makeVortexMsg(self, filt: dict,
                      tupleSelector: TupleSelector) -> Union[Deferred, bytes]:
        """ Serialise the cached coord sets into a vortex message. """
        coordSets = self._coordSetCacheController.coordSets

        envelope = yield Payload(
            filt, tuples=coordSets).makePayloadEnvelopeDefer()
        vortexMsg = yield envelope.toVortexMsgDefer()
        return vortexMsg
Exemplo n.º 29
0
    def testPayloadToFromEncodedPayload10mb(self):
        """ Round-trip a ~10MB payload through the encoded payload format. """
        # Create Payload
        original = makeTestPayloadA()
        original.tuples = ['1234567890' * 1024 ** 2]

        decoded = Payload().fromEncodedPayload(original.toEncodedPayload())

        self.assertEqual(decoded.tuples[0], original.tuples[0])
Exemplo n.º 30
0
    def testClassStaysInScope(self):
        """ Verify a PayloadEndpoint keeps delivering to its bound instance
        even though no reference to the endpoint is kept here. """
        filt = {'key': 'unittest', 'This matches': 555}

        payload = Payload()
        payload.filt = filt

        # The PayloadEndpoint return value is deliberately discarded; the
        # successful delivery below proves it stayed in scope.
        inst = PayloadEndpointPyTestAssignPayload(self)
        PayloadEndpoint(filt, inst.process)

        PayloadIO().process(payloadEnvelope=payload.makePayloadEnvelope(),
                            vortexUuid='test',
                            vortexName='test',
                            httpSession=None,
                            sendResponse=lambda x: x)

        # inst.process is expected to assign the delivered envelope back
        # onto this test (self.deliveredPayloadEnvelope) — confirm against
        # PayloadEndpointPyTestAssignPayload.
        self.assertEqual(
            self.deliveredPayloadEnvelope, payload,
            'PayloadIO/PayloadEndpoint unmatched value test error')