Example No. 1
    def test_msgpack_unpk(self):
        byts = b'\x92\xa4hehe\n' * 3

        unpk = s_msgpack.Unpk()
        rets = unpk.feed(byts)

        self.eq(rets, [(7, ('hehe', 10))] * 3)
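
The Unpk helper implements an incremental feed pattern: every call to feed() consumes whatever bytes are available and returns a list of (size, obj) tuples, one per complete message, while partial messages stay buffered. A minimal sketch of the same pattern using the plain msgpack library (the Unpacker options shown are an assumption about how Unpk is likely built, not taken from the examples):

import msgpack

# Incrementally decode a stream of back-to-back msgpack messages.
unpacker = msgpack.Unpacker(raw=False, use_list=False)
unpacker.feed(msgpack.packb(('hehe', 10)) * 3)

for obj in unpacker:
    print(obj)  # prints ('hehe', 10) three times
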
Example No. 2
    def test_msgpack_byte(self):
        unpk = s_msgpack.Unpk()
        self.len(0, unpk.feed(b'\xa4'))
        self.len(0, unpk.feed(b'v'))
        self.len(0, unpk.feed(b'i'))
        self.len(0, unpk.feed(b's'))
        self.eq(unpk.feed(b'i')[0], (5, 'visi'))
Example No. 3
    def test_msgpack_surrogates(self):
        bads = '\u01cb\ufffd\ud842\ufffd\u0012'
        obyts = s_msgpack.en(bads)
        self.isinstance(obyts, bytes)

        outs = s_msgpack.un(obyts)
        self.eq(outs, bads)

        with self.getTestDir() as fdir:
            fd = s_common.genfile(fdir, 'test.mpk')
            fd.write(obyts)
            fd.close()

            fd = s_common.genfile(fdir, 'test.mpk')
            gen = s_msgpack.iterfd(fd)

            items = [obj for obj in gen]
            self.len(1, items)
            self.eq(items[0], bads)

            fd.close()

        unpk = s_msgpack.Unpk()
        ret = unpk.feed(obyts)
        self.len(1, ret)
        self.eq([(13, bads)], ret)
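
The string under test contains a lone surrogate (\ud842), which strict UTF-8 encoding rejects. The round trip works because the packer and unpacker presumably use the 'surrogatepass' error handler; a sketch with the plain msgpack library, assuming unicode_errors='surrogatepass' mirrors what s_msgpack.en/un do:

import msgpack

bads = '\u01cb\ufffd\ud842\ufffd\u0012'

# 'surrogatepass' lets the lone surrogate survive the UTF-8 round trip.
buf = msgpack.packb(bads, unicode_errors='surrogatepass')
outs = msgpack.unpackb(buf, raw=False, unicode_errors='surrogatepass')
assert outs == bads
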
Example No. 4
    def test_msgpack_types(self):
        # This is a future-proofing test for msgpack to ensure that this
        # known-good byte sequence keeps unpacking to the same structure.
        buf = b'\x92\xa4hehe\x85\xa3str\xa41234\xa3int\xcd\x04\xd2\xa5float\xcb@(\xae\x14z\xe1G\xae\xa3bin\xc4\x041234\xa9realworld\xac\xc7\x8b\xef\xbf\xbd\xed\xa1\x82\xef\xbf\xbd\x12'
        struct = (
            'hehe',
            {
                'str': '1234',
                'int': 1234,
                'float': 12.34,
                'bin': b'1234',
                'realworld': '\u01cb\ufffd\ud842\ufffd\u0012'
            }
        )
        unode = s_msgpack.un(buf)
        self.eq(unode, struct)

        # Ensure our use of msgpack.Unpacker can also handle this data
        with self.getTestDir() as dirn:
            with s_common.genfile(dirn, 'test.mpk') as fd:
                fd.write(buf)
            with s_common.genfile(dirn, 'test.mpk') as fd:
                genr = s_msgpack.iterfd(fd)
                objs = list(genr)
                self.len(1, objs)
                self.eq(objs[0], struct)

        # Ensure that our streaming Unpk object can also handle this data
        unpk = s_msgpack.Unpk()
        objs = unpk.feed(buf)
        self.len(1, objs)
        self.eq(objs[0], (71, struct))
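
The hard-coded buffer can be decoded independently with the plain msgpack module; use_list=False makes arrays come back as tuples so the comparison with struct holds. A sketch under the assumption that s_msgpack.un behaves like unpackb with these options:

import msgpack

buf = b'\x92\xa4hehe\x85\xa3str\xa41234\xa3int\xcd\x04\xd2\xa5float\xcb@(\xae\x14z\xe1G\xae\xa3bin\xc4\x041234\xa9realworld\xac\xc7\x8b\xef\xbf\xbd\xed\xa1\x82\xef\xbf\xbd\x12'

# raw=False decodes str fields, use_list=False yields tuples instead of lists.
obj = msgpack.unpackb(buf, raw=False, use_list=False,
                      unicode_errors='surrogatepass')
print(obj[0])         # 'hehe'
print(obj[1]['int'])  # 1234
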
Example No. 5
    def test_msgpack_large_data(self):

        big_string = s_const.mebibyte * 129 * 'V'
        struct = ('test', {'key': big_string})

        buf = s_msgpack.en(struct)

        unpacked_struct = s_msgpack.un(buf)
        self.eq(struct, unpacked_struct)

        # Ensure our use of msgpack.Unpacker can also handle this data
        with self.getTestDir() as dirn:
            with s_common.genfile(dirn, 'test.mpk') as fd:
                fd.write(buf)
            with s_common.genfile(dirn, 'test.mpk') as fd:
                genr = s_msgpack.iterfd(fd)
                objs = list(genr)
                self.len(1, objs)
                self.eq(objs[0], struct)

        # Ensure that our streaming Unpk object can also handle this data
        unpk = s_msgpack.Unpk()
        objs = unpk.feed(buf)
        self.len(1, objs)
        self.eq(objs[0], (135266320, struct))
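
The 129 MiB payload matters because msgpack.Unpacker enforces a max_buffer_size cap (100 MiB by default in recent msgpack-python releases), so a streaming unpacker has to raise that limit to accept a message this large. A sketch of the relevant knob, assuming the Synapse helpers raise it in a similar way internally:

import msgpack

# Allow single messages larger than the default buffer cap.
unpacker = msgpack.Unpacker(raw=False, use_list=False,
                            max_buffer_size=2**31)

unpacker.feed(msgpack.packb(('test', {'key': 'V' * (129 * 1024 * 1024)})))
item = next(iter(unpacker))
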
Example No. 6
    async def iterMpkFile(self, sha256):
        '''
        Yield items from a .mpk message pack stream file.
        '''
        unpk = s_msgpack.Unpk()
        async for byts in self.get(s_common.uhex(sha256)):
            for _, item in unpk.feed(byts):
                yield item
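
iterMpkFile feeds every downloaded chunk into one Unpk instance, so messages that straddle chunk boundaries are reassembled transparently. A synchronous sketch of the same chunked-read pattern with the plain msgpack library (the function name and chunk size are illustrative):

import msgpack

def iter_mpk_file(path, chunksize=65536):
    '''Yield unpacked items from a .mpk file, reading it in chunks.'''
    unpacker = msgpack.Unpacker(raw=False, use_list=False)
    with open(path, 'rb') as fd:
        while True:
            byts = fd.read(chunksize)
            if not byts:
                break
            unpacker.feed(byts)
            for item in unpacker:
                yield item
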
Example No. 7
    async def __anit__(self, reader, writer, info=None):

        await s_base.Base.__anit__(self)

        self.iden = s_common.guid()

        self.reader = reader
        self.writer = writer

        self.rxqu = collections.deque()

        self.sock = self.writer.get_extra_info('socket')
        self.peercert = self.writer.get_extra_info('peercert')

        self._drain_lock = asyncio.Lock()

        if info is None:
            info = {}

        if not info.get('unix'):

            # disable nagle ( to minimize latency for small xmit )
            self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

            # enable TCP keep alives...
            self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
            if hasattr(socket, 'TCP_KEEPIDLE'):
                # start sending keep alives after 3 sec of inactivity
                self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE,
                                     3)
                # send keep alives every 3 seconds once started
                self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL,
                                     3)
                # close the socket after 5 failed keep alives (15 sec)
                self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 5)

        self.info = info
        self.hostname = self.info.get('hostname')

        self.unpk = s_msgpack.Unpk()

        async def fini():
            self.writer.close()
            try:
                await self.writer.wait_closed()
            except (BrokenPipeError, ConnectionResetError) as e:
                logger.debug('Link error waiting on close: %s', str(e))

        self.onfini(fini)

        if self.hostname is not None:
            if self.hostname != self.getTlsPeerCn():
                mesg = f'Expected: {self.hostname} Got: {self.getTlsPeerCn()}'
                await self.fini()
                raise s_exc.BadCertHost(mesg=mesg)
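
The keepalive block amounts to: probe after 3 seconds of idle, re-probe every 3 seconds, and drop the connection after 5 missed probes, so a dead peer is detected roughly 15-18 seconds after traffic stops. The same options work on any connected TCP socket; a standalone sketch (the hasattr guards matter because TCP_KEEPIDLE and friends are platform-specific):

import socket

def enable_keepalive(sock, idle=3, interval=3, count=5):
    '''Enable aggressive TCP keepalives on a connected socket.'''
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
    if hasattr(socket, 'TCP_KEEPIDLE'):
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, idle)
    if hasattr(socket, 'TCP_KEEPINTVL'):
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, interval)
    if hasattr(socket, 'TCP_KEEPCNT'):
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, count)
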
Example No. 8
    def checkTypes(self, enfunc):
        # This is a future-proofing test for msgpack to ensure that this
        # known-good byte sequence keeps unpacking to the same structure.
        buf = b'\x92\xa4hehe\x85\xa3str\xa41234\xa3int\xcd\x04\xd2\xa5float\xcb@(\xae\x14z\xe1G\xae\xa3bin\xc4\x041234\xa9realworld\xac\xc7\x8b\xef\xbf\xbd\xed\xa1\x82\xef\xbf\xbd\x12'
        struct = (
            'hehe',
            {
                'str': '1234',
                'int': 1234,
                'float': 12.34,
                'bin': b'1234',
                'realworld': '\u01cb\ufffd\ud842\ufffd\u0012'
            }
        )
        unode = s_msgpack.un(buf)
        self.eq(unode, struct)

        # Ensure our use of msgpack.Unpacker can also handle this data
        with self.getTestDir() as dirn:
            with s_common.genfile(dirn, 'test.mpk') as fd:
                fd.write(buf)
            with s_common.genfile(dirn, 'test.mpk') as fd:
                genr = s_msgpack.iterfd(fd)
                objs = list(genr)
                self.len(1, objs)
                self.eq(objs[0], struct)

        # Ensure that our streaming Unpk object can also handle this data
        unpk = s_msgpack.Unpk()
        objs = unpk.feed(buf)
        self.len(1, objs)
        self.eq(objs[0], (71, struct))

        # Generic isok helper
        self.true(s_msgpack.isok(1))
        self.true(s_msgpack.isok('1'))
        self.true(s_msgpack.isok(1.1))
        self.true(s_msgpack.isok(b'1'))
        self.true(s_msgpack.isok(None))
        self.true(s_msgpack.isok(True))
        self.true(s_msgpack.isok(False))
        self.true(s_msgpack.isok([1]))
        self.true(s_msgpack.isok((1,)))
        self.true(s_msgpack.isok({1: 1}))
        # types that cannot be packed
        self.false(s_msgpack.isok({1, 2}))  # set
        self.false(s_msgpack.isok(print))  # function

        buf2 = b'\x81\xc0\xcd\x04\xd2'
        struct2 = {
            None: 1234
        }
        ustruct2 = s_msgpack.un(buf2)
        self.eq(ustruct2, struct2)
        pbuf2 = enfunc(ustruct2)
        self.eq(buf2, pbuf2)
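
The isok checks outline the contract: primitives, bytes, lists, tuples, and dicts can be packed, while sets and functions cannot. A hypothetical recursive check along those lines (an illustration of the behavior the test exercises, not the actual s_msgpack implementation):

def isok(item):
    '''Return True if item only contains msgpack-serializable types (sketch).'''
    if item is None or isinstance(item, (bool, int, float, str, bytes)):
        return True
    if isinstance(item, (list, tuple)):
        return all(isok(i) for i in item)
    if isinstance(item, dict):
        return all(isok(k) and isok(v) for k, v in item.items())
    return False
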
Example No. 9
    async def iterMpkFile(self, sha256):
        '''
        Yield items from a MsgPack (.mpk) file in the Axon.

        Args:
            sha256 (str): The sha256 hash of the file as a string.

        Yields:
            Unpacked items from the bytes.
        '''
        unpk = s_msgpack.Unpk()
        async for byts in self.get(s_common.uhex(sha256)):
            for _, item in unpk.feed(byts):
                yield item
Example No. 10
    def __init__(self, plex, sock):

        Link.__init__(self, None)

        self.plex = plex
        self.sock = sock

        self.txbuf = b''
        self.txque = collections.deque()  # (byts, info)
        self.txlock = threading.Lock()

        self.unpk = s_msgpack.Unpk()
        self.flags = selectors.EVENT_READ

        def fini():
            self.plex._finiPlexSock(self.sock)

        self.onfini(fini)
Example No. 11
    async def __anit__(self, reader, writer, info=None):

        await s_base.Base.__anit__(self)

        self.iden = s_common.guid()

        self.reader = reader
        self.writer = writer

        self.rxqu = collections.deque()

        self.sock = self.writer.get_extra_info('socket')

        self._drain_lock = asyncio.Lock()

        if info is None:
            info = {}

        if not info.get('unix'):

            # disable nagle ( to minimize latency for small xmit )
            self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

            # enable TCP keep alives...
            self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
            if hasattr(socket, 'TCP_KEEPIDLE'):
                # start sending keep alives after 3 sec of inactivity
                self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE,
                                     3)
                # send keep alives every 3 seconds once started
                self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL,
                                     3)
                # close the socket after 5 failed keep alives (15 sec)
                self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 5)

        self.info = info

        self.unpk = s_msgpack.Unpk()

        async def fini():
            self.writer.close()

        self.onfini(fini)
Example No. 12
    def __init__(self, plex, sock):

        Link.__init__(self, None)

        self.plex = plex
        self.sock = sock
        self.fino = sock.fileno()

        self.txbuf = b''
        self.txque = collections.deque()  # (byts, info)
        self.txlock = threading.Lock()

        self.unpk = s_msgpack.Unpk()
        self.flags = select.EPOLLIN | select.EPOLLERR | select.EPOLLET

        def fini():
            self.plex._finiPlexSock(self.sock)

        self.onfini(fini)
Example No. 13
    def __init__(self, fdir, path, xmit=None):

        s_eventbus.EventBus.__init__(self)

        self.wmin = fdir.getConfOpt('window:min')
        self.wmax = fdir.getConfOpt('window:max')
        self.wfill = fdir.getConfOpt('window:fill')

        if not os.path.isfile(path):
            with open(path, 'wb') as fd:
                fd.write(b'\x00' * 8)

        self.lock = threading.RLock()

        self.dirty = False
        self.caught = False

        self.filling = s_atomic.CmpSet(False)

        self.fdir = fdir
        self._xmit = xmit

        self.unpk = s_msgpack.Unpk()
        self.dequ = collections.deque()

        # open our state machine header atom
        self.head = s_atomfile.openAtomFile(path, memok=True)
        self.onfini(self.head.fini)

        # the next expected ack
        self.nack = struct.unpack('<Q', self.head.readoff(0, 8))[0]

        # if the fifo dir has moved past us, catch up... :(
        if self.nack < fdir.seqs[0]:
            self.nack = fdir.seqs[0]

        self.nseq = self.nack
        self._initFifoAtom(self.nseq)

        # what's the last sequence in the window...
        self._fill()
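
The ack pointer is persisted as a single 8-byte little-endian unsigned integer at offset 0 of the header file, which is why a fresh file is seeded with eight zero bytes and the value is read back with struct.unpack('<Q', ...). A minimal sketch of that header convention (helper names are ours):

import struct

def read_nack(path):
    '''Read the 8-byte little-endian ack offset from a header file.'''
    with open(path, 'rb') as fd:
        return struct.unpack('<Q', fd.read(8))[0]

def write_nack(path, nack):
    '''Write the ack offset back as 8 little-endian bytes at offset 0.'''
    with open(path, 'r+b') as fd:
        fd.write(struct.pack('<Q', nack))
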
Example No. 14
    async def _httpRespMsgpack(self):
        byts = self.valu.get('body')
        unpk = s_msgpack.Unpk()
        for _, item in unpk.feed(byts):
            yield item
Example No. 15
    async def test_spawncore(self):
        # This test makes a real Cortex in a remote process, and then
        # gets the spawninfo from that real Cortex in order to make a
        # local SpawnCore. This avoids the problem of being unable to
        # open lmdb environments multiple times by the same process
        # and allows direct testing of the SpawnCore object.

        mpctx = multiprocessing.get_context('spawn')
        queue = mpctx.Queue()
        event = mpctx.Event()

        conf = {
            'storm:log': True,
            'storm:log:level': logging.INFO,
            'modules': [('synapse.tests.utils.TestModule', {})],
        }
        queries = [
            '[test:str="Cortex from the aether!"]',
        ]
        with self.getTestDir() as dirn:
            args = (dirn, conf, queries, queue, event)
            proc = mpctx.Process(target=make_core, args=args)
            try:
                proc.start()
                spawninfo = queue.get(timeout=30)

                async with await s_spawn.SpawnCore.anit(spawninfo) as core:
                    root = await core.auth.getUserByName('root')
                    q = '''test:str
                    $lib.print($lib.str.format("{n}", n=$node.repr()))
                    | limit 1'''
                    item = {
                        'user': root.iden,
                        'view': list(core.views.keys())[0],
                        'storm': {
                            'query': q,
                            'opts': None,
                        }
                    }

                    # Test the storm implementation used by spawncore
                    msgs = await s_test.alist(s_spawn.storm(core, item))
                    podes = [m[1] for m in msgs if m[0] == 'node']
                    e = 'Cortex from the aether!'
                    self.len(1, podes)
                    self.eq(podes[0][0], ('test:str', e))
                    self.stormIsInPrint(e, msgs)

                    # Direct test of the _innerloop code.
                    todo = mpctx.Queue()
                    done = mpctx.Queue()

                    # Test poison - this would cause the corework to exit
                    todo.put(None)
                    self.none(await s_spawn._innerloop(core, todo, done))

                    # Test a real item with a link associated with it. This ends
                    # up getting a bunch of telepath messages directly.
                    todo_item = item.copy()
                    link0, sock0 = await s_link.linksock()
                    todo_item['link'] = link0.getSpawnInfo()
                    todo.put(todo_item)
                    self.true(await s_spawn._innerloop(core, todo, done))
                    resp = done.get(timeout=12)
                    self.false(resp)
                    buf0 = sock0.recv(1024 * 16)
                    unpk = s_msgpack.Unpk()
                    msgs = [msg for (offset, msg) in unpk.feed(buf0)]
                    self.eq({'t2:genr', 't2:yield'}, {m[0] for m in msgs})

                    await link0.fini()  # We're done with the link now
                    todo.close()
                    done.close()

                # Test the workloop directly - this again just gets telepath
                # messages back. This does use poison to kill the workloop.
                todo = mpctx.Queue()
                done = mpctx.Queue()

                task = asyncio.create_task(
                    s_spawn._workloop(spawninfo, todo, done))
                await asyncio.sleep(0.01)
                link1, sock1 = await s_link.linksock()
                todo_item = item.copy()
                todo_item['link'] = link1.getSpawnInfo()
                todo.put(todo_item)
                # Don't block the IO loop!
                resp = await s_coro.executor(done.get, timeout=12)
                self.false(resp)
                buf0 = sock1.recv(1024 * 16)
                unpk = s_msgpack.Unpk()
                msgs = [msg for (offset, msg) in unpk.feed(buf0)]
                self.eq({'t2:genr', 't2:yield'}, {m[0] for m in msgs})
                await link1.fini()  # We're done with the link now
                # Poison the queue - this should close the task
                todo.put(None)
                self.none(await asyncio.wait_for(task, timeout=12))

                todo.close()
                done.close()

            finally:

                queue.close()
                event.set()
                proc.join(12)
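
The _innerloop assertions recover telepath messages by reading raw bytes from one end of a socket pair and feeding them to an Unpk. The recv-and-feed pattern can be reproduced with a plain socketpair and the msgpack module (a self-contained sketch, not the Synapse link machinery):

import socket
import msgpack

s0, s1 = socket.socketpair()

# One side writes two packed messages back to back...
s1.sendall(msgpack.packb(('t2:genr', {})) + msgpack.packb(('t2:yield', {})))

# ...the other side recovers them from whatever bytes arrive.
unpacker = msgpack.Unpacker(raw=False, use_list=False)
unpacker.feed(s0.recv(1024 * 16))
msgs = list(unpacker)
assert {m[0] for m in msgs} == {'t2:genr', 't2:yield'}
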
Example No. 16
    async def test_tools_storm(self):

        async with self.getTestCore() as core:

            await core.addTagProp('foo', ('int', {}), {})

            pars = s_t_storm.getArgParser()
            opts = pars.parse_args(('woot', ))
            self.eq('woot', opts.cortex)

            q = '$lib.model.ext.addFormProp(inet:ipv4, "_test:score", (int, $lib.dict()), $lib.dict())'
            await core.callStorm(q)

            async with core.getLocalProxy() as proxy:

                outp = s_output.OutPutStr()
                async with await s_t_storm.StormCli.anit(proxy,
                                                         outp=outp) as scli:
                    await scli.runCmdLine(
                        '[inet:ipv4=1.2.3.4 +#foo=2012 +#bar +#baz:foo=10 :_test:score=7]'
                    )
                    text = str(outp)
                    self.isin('.....', text)
                    self.isin('inet:ipv4=1.2.3.4', text)
                    self.isin(':type = unicast', text)
                    self.isin(':_test:score = 7', text)
                    self.isin('.created = ', text)
                    self.isin('#bar', text)
                    self.isin('#baz:foo = 10', text)
                    self.isin(
                        '#foo = (2012/01/01 00:00:00.000, 2012/01/01 00:00:00.001)',
                        text)
                    self.isin('complete. 1 nodes in', text)

                outp = s_output.OutPutStr()
                async with await s_t_storm.StormCli.anit(proxy,
                                                         outp=outp) as scli:
                    await scli.runCmdLine('!quit')
                    self.isin('o/', str(outp))
                    self.true(scli.isfini)

                outp = s_output.OutPutStr()
                async with await s_t_storm.StormCli.anit(proxy,
                                                         outp=outp) as scli:
                    await scli.runCmdLine('!help')
                    self.isin('!quit', str(outp))

                outp = s_output.OutPutStr()
                async with await s_t_storm.StormCli.anit(proxy,
                                                         outp=outp) as scli:
                    await scli.runCmdLine('$lib.print(woot)')
                    self.isin('woot', str(outp))

                outp = s_output.OutPutStr()
                async with await s_t_storm.StormCli.anit(proxy,
                                                         outp=outp) as scli:
                    await scli.runCmdLine('$lib.warn(woot)')
                    self.isin('WARNING: woot', str(outp))

                outp = s_output.OutPutStr()
                async with await s_t_storm.StormCli.anit(proxy,
                                                         outp=outp) as scli:
                    await scli.runCmdLine('---')
                    self.isin(
                        "---\n ^\nSyntax Error: No terminal defined for '-' at line 1 col 2",
                        str(outp))

                outp = s_output.OutPutStr()
                async with await s_t_storm.StormCli.anit(proxy,
                                                         outp=outp) as scli:
                    await scli.runCmdLine('spin |' + ' ' * 80 + '---')
                    self.isin(
                        "...                             ---\n                                 ^",
                        str(outp))

                outp = s_output.OutPutStr()
                async with await s_t_storm.StormCli.anit(proxy,
                                                         outp=outp) as scli:
                    await scli.runCmdLine('---' + ' ' * 80 + 'spin')
                    self.isin("---                            ...\n ^",
                              str(outp))

            lurl = core.getLocalUrl()

            outp = s_output.OutPutStr()
            await s_t_storm.main((lurl, '$lib.print(woot)'), outp=outp)
            self.isin('woot', str(outp))

            outp = s_output.OutPutStr()
            await s_t_storm.main((lurl, f'!runfile --help'), outp=outp)
            self.isin('Run a local storm file', str(outp))

            with self.getTestDir() as dirn:

                path = os.path.join(dirn, 'foo.storm')
                with open(path, 'wb') as fd:
                    fd.write(b'$lib.print(woot)')

                outp = s_output.OutPutStr()
                await s_t_storm.main((lurl, f'!runfile {path}'), outp=outp)
                self.isin(f'running storm file: {path}', str(outp))
                self.isin('woot', str(outp))

                outp = s_output.OutPutStr()
                await s_t_storm.main((lurl, f'!runfile /newp.storm'),
                                     outp=outp)
                self.isin(f'no such file: /newp.storm', str(outp))

                outp = s_output.OutPutStr()
                await s_t_storm.main((lurl, f'!pushfile /newp'), outp=outp)
                self.isin(f'no such file: /newp', str(outp))

                outp = s_output.OutPutStr()
                await s_t_storm.main((lurl, f'!pushfile {path}'), outp=outp)
                text = str(outp)
                self.isin(f'uploading file: {path}', text)
                self.isin(':name = foo.storm', text)
                self.isin(
                    ':sha256 = c00adfcc316f8b00772cdbce2505b9ea539d74f42861801eceb1017a44344ed3',
                    text)

                outp = s_output.OutPutStr()
                path = os.path.join(dirn, 'bar.storm')
                await s_t_storm.main((
                    lurl,
                    f'!pullfile c00adfcc316f8b00772cdbce2505b9ea539d74f42861801eceb1017a44344ed3 {path}'
                ),
                                     outp=outp)

                text = str(outp)
                self.isin(
                    'downloading sha256: c00adfcc316f8b00772cdbce2505b9ea539d74f42861801eceb1017a44344ed3',
                    text)
                self.isin(f'saved to: {path}', text)

                with s_common.genfile(path) as fd:
                    self.isin('woot', fd.read().decode())

                outp = s_output.OutPutStr()
                await s_t_storm.main((
                    lurl,
                    f'!pullfile c11adfcc316f8b00772cdbce2505b9ea539d74f42861801eceb1017a44344ed3 {path}'
                ),
                                     outp=outp)
                text = str(outp)
                self.isin('Axon does not contain the requested file.', text)

                await scli.runCmdLine('[test:str=foo +#foo +#bar +#baz]')
                await scli.runCmdLine('[test:str=bar +#foo +#bar +#baz]')

                path = os.path.join(dirn, 'export1.nodes')
                await s_t_storm.main((lurl, f'!export {path} {{ test:str }}'),
                                     outp=outp)
                text = str(outp)
                self.isin(f'saved 2 nodes to: {path}', text)

                with open(path, 'rb') as fd:
                    byts = fd.read()
                    podes = [i[1] for i in s_msgpack.Unpk().feed(byts)]
                    self.sorteq(('bar', 'foo'), [p[0][1] for p in podes])
                    for pode in podes:
                        self.sorteq(('bar', 'baz', 'foo'), pode[1]['tags'])

                path = os.path.join(dirn, 'export2.nodes')
                q = f'!export {path} {{ test:str }} --include-tags foo bar'
                await s_t_storm.main((lurl, q), outp=outp)
                text = str(outp)
                self.isin(f'saved 2 nodes to: {path}', text)

                with open(path, 'rb') as fd:
                    byts = fd.read()
                    podes = [i[1] for i in s_msgpack.Unpk().feed(byts)]
                    self.sorteq(('bar', 'foo'), [p[0][1] for p in podes])
                    for pode in podes:
                        self.sorteq(('bar', 'foo'), pode[1]['tags'])

                path = os.path.join(dirn, 'export3.nodes')
                q = f'!export {path} {{ test:str }} --no-tags'
                await s_t_storm.main((lurl, q), outp=outp)
                text = str(outp)
                self.isin(f'saved 2 nodes to: {path}', text)

                with open(path, 'rb') as fd:
                    byts = fd.read()
                    podes = [i[1] for i in s_msgpack.Unpk().feed(byts)]
                    self.sorteq(('bar', 'foo'), [p[0][1] for p in podes])
                    for pode in podes:
                        self.eq({}, pode[1]['tags'])

                await s_t_storm.main((lurl, f'!export {path} {{ test:newp }}'),
                                     outp=outp)
                text = str(outp)
                self.isin('No property named test:newp.', text)
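
Every !export check above follows the same recipe: read the .nodes file, feed the whole buffer to a fresh Unpk, and keep the second element of each (size, pode) tuple. Pulled out as a small helper (the function name is ours; the body mirrors the test code):

def load_nodes(path):
    '''Return the packed nodes (podes) stored in an exported .nodes file.'''
    with open(path, 'rb') as fd:
        byts = fd.read()
    return [item for _, item in s_msgpack.Unpk().feed(byts)]

# e.g. forms = [pode[0][0] for pode in load_nodes(path)]
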