    async def test_tools_autodoc_docmodel(self):

        with self.getTestDir() as path:

            argv = ['--doc-model', '--savedir', path]

            outp = self.getTestOutp()
            self.eq(await s_autodoc.main(argv, outp=outp), 0)

            with s_common.genfile(path, 'datamodel_types.rst') as fd:
                buf = fd.read()

            s = buf.decode()
            self.isin('Base types are defined via Python classes.', s)
            self.isin('synapse.models.inet.Addr', s)
            self.isin('Regular types are derived from BaseTypes.', s)
            self.isin(r'inet\:server', s)

            with s_common.genfile(path, 'datamodel_forms.rst') as fd:
                buf = fd.read()

            s = buf.decode()
            self.isin(
                'Forms are derived from types, or base types. Forms represent node types in the graph.',
                s)
            self.isin(r'inet\:ipv4', s)
            self.notin(r'file\:bytes:.created', s)
            self.isin(
                'Universal props are system level properties which may be present on every node.',
                s)
            self.isin('.created', s)
            self.notin('..created\n', s)
    async def test_csvtool(self):

        async with self.getTestCore() as core:

            url = core.getLocalUrl()

            dirn = s_common.gendir(core.dirn, 'junk')

            logpath = s_common.genpath(dirn, 'csvtest.log')

            csvpath = s_common.genpath(dirn, 'csvtest.csv')
            with s_common.genfile(csvpath) as fd:
                fd.write(csvfile)

            stormpath = s_common.genpath(dirn, 'csvtest.storm')
            with s_common.genfile(stormpath) as fd:
                fd.write(csvstorm)

            argv = [
                '--csv-header', '--debug', '--cortex', url, '--logfile',
                logpath, stormpath, csvpath
            ]
            outp = self.getTestOutp()

            await s_csvtool.main(argv, outp=outp)
            outp.expect('oh hai')
            outp.expect('2 nodes')
Example #3
    def test_msgpack_large_data(self):

        big_string = s_const.mebibyte * 129 * 'V'
        struct = ('test', {'key': big_string})

        buf = s_msgpack.en(struct)

        unpacked_struct = s_msgpack.un(buf)
        self.eq(struct, unpacked_struct)

        # Ensure our use of msgpack.Unpacker can also handle this data
        with self.getTestDir() as dirn:
            with s_common.genfile(dirn, 'test.mpk') as fd:
                fd.write(buf)
            with s_common.genfile(dirn, 'test.mpk') as fd:
                genr = s_msgpack.iterfd(fd)
                objs = list(genr)
                self.len(1, objs)
                self.eq(objs[0], struct)

        # Ensure that our streaming Unpk object can also handle this data
        unpk = s_msgpack.Unpk()
        objs = unpk.feed(buf)
        self.len(1, objs)
        self.eq(objs[0], (135266320, struct))
Example #4
    def test_doc_data(self):
        with self.getTestDir() as dirn:
            s_common.gendir(dirn, 'docdata', 'stuff')

            docdata = s_common.genpath(dirn, 'docdata')

            root = s_common.genpath(dirn, 'synapse', 'userguides')

            d = {'key': 'value'}

            s_common.jssave(d, docdata, 'data.json')
            s_common.yamlsave(d, docdata, 'data.yaml')
            s_msgpack.dumpfile(d, os.path.join(docdata, 'data.mpk'))
            with s_common.genfile(docdata, 'stuff', 'data.txt') as fd:
                fd.write('beep'.encode())
            with s_common.genfile(docdata, 'data.jsonl') as fd:
                fd.write(json.dumps(d).encode() + b'\n')
                fd.write(json.dumps(d).encode() + b'\n')
                fd.write(json.dumps(d).encode() + b'\n')

            data = s_jupyter.getDocData('data.json', root)
            self.eq(data, d)
            data = s_jupyter.getDocData('data.yaml', root)
            self.eq(data, d)
            data = s_jupyter.getDocData('data.mpk', root)
            self.eq(data, d)
            data = s_jupyter.getDocData('stuff/data.txt', root)
            self.eq(data, b'beep')
            data = s_jupyter.getDocData('data.jsonl', root)
            self.eq(data, [d, d, d])

            self.raises(ValueError, s_jupyter.getDocData, 'newp.bin', root)
            self.raises(ValueError, s_jupyter.getDocData,
                        '../../../../../../etc/passwd', root)
    async def test_csvtool(self):

        async with self.getTestCore() as core:

            url = core.getLocalUrl()

            dirn = s_common.gendir(core.dirn, 'junk')

            logpath = s_common.genpath(dirn, 'csvtest.log')

            csvpath = s_common.genpath(dirn, 'csvtest.csv')
            with s_common.genfile(csvpath) as fd:
                fd.write(csvfile)

            stormpath = s_common.genpath(dirn, 'csvtest.storm')
            with s_common.genfile(stormpath) as fd:
                fd.write(csvstorm)

            argv = [
                '--csv-header', '--debug', '--cortex', url, '--logfile',
                logpath, stormpath, csvpath
            ]
            outp = self.getTestOutp()

            await s_csvtool.main(argv, outp=outp)
            outp.expect('oh hai')
            outp.expect('2 nodes')

            with mock.patch('synapse.telepath.Proxy._getSynVers',
                            self._getOldSynVers):
                outp = self.getTestOutp()
                await s_csvtool.main(argv, outp=outp)
                outp.expect(
                    'Cortex version 0.0.0 is outside of the csvtool supported range'
                )
    async def test_csvtool_cli(self):

        with self.getTestDir() as dirn:

            logpath = s_common.genpath(dirn, 'csvtest.log')

            csvpath = s_common.genpath(dirn, 'csvtest.csv')
            with s_common.genfile(csvpath) as fd:
                fd.write(csvfile)

            stormpath = s_common.genpath(dirn, 'csvtest.storm')
            with s_common.genfile(stormpath) as fd:
                fd.write(csvstorm)

            argv = [
                '--csv-header', '--debug', '--cli', '--test', '--logfile',
                logpath, stormpath, csvpath
            ]
            outp = self.getTestOutp()

            cmdg = s_t_utils.CmdGenerator([
                'storm --hide-props inet:fqdn',
                EOFError(),
            ])

            with self.withCliPromptMockExtendOutp(outp):
                with self.withTestCmdr(cmdg):
                    await s_csvtool.main(argv, outp=outp)

            outp.expect('inet:fqdn=google.com')
            outp.expect('2 nodes')
Example #7
    def test_msgpack_types(self):
        # This is a future-proofing test for msgpack to ensure that a previously packed byte sequence keeps decoding to the expected Python values.
        buf = b'\x92\xa4hehe\x85\xa3str\xa41234\xa3int\xcd\x04\xd2\xa5float\xcb@(\xae\x14z\xe1G\xae\xa3bin\xc4\x041234\xa9realworld\xac\xc7\x8b\xef\xbf\xbd\xed\xa1\x82\xef\xbf\xbd\x12'
        struct = (
            'hehe',
            {
                'str': '1234',
                'int': 1234,
                'float': 12.34,
                'bin': b'1234',
                'realworld': '\u01cb\ufffd\ud842\ufffd\u0012'
            }
        )
        unode = s_msgpack.un(buf)
        self.eq(unode, struct)

        # Ensure our use of msgpack.Unpacker can also handle this data
        with self.getTestDir() as dirn:
            with s_common.genfile(dirn, 'test.mpk') as fd:
                fd.write(buf)
            with s_common.genfile(dirn, 'test.mpk') as fd:
                genr = s_msgpack.iterfd(fd)
                objs = list(genr)
                self.len(1, objs)
                self.eq(objs[0], struct)

        # Ensure that our streaming Unpk object can also handle this data
        unpk = s_msgpack.Unpk()
        objs = unpk.feed(buf)
        self.len(1, objs)
        self.eq(objs[0], (71, struct))
Example #8
def loadPkgProto(path, opticdir=None):

    full = s_common.genpath(path)
    pkgdef = s_common.yamlload(full)

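    # A version given as a semver string is normalized via chopSemVer.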
    if isinstance(pkgdef['version'], str):
        pkgdef['version'] = chopSemVer(pkgdef['version'])

    protodir = os.path.dirname(full)

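    # Load each Storm module's source from storm/modules/<name> relative to the package prototype.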
    for mod in pkgdef.get('modules', ()):
        name = mod.get('name')
        with s_common.genfile(protodir, 'storm', 'modules', name) as fd:
            mod['storm'] = fd.read().decode()

    for cmd in pkgdef.get('commands', ()):
        name = cmd.get('name')
        with s_common.genfile(protodir, 'storm', 'commands', name) as fd:
            cmd['storm'] = fd.read().decode()

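    # Optic (UI) files default to an 'optic' directory next to the prototype and are folded into the package definition.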
    if opticdir is None:
        opticdir = s_common.genpath(protodir, 'optic')

    if os.path.isdir(opticdir):
        pkgdef.setdefault('optic', {})
        pkgdef['optic'].setdefault('files', {})
        loadOpticFiles(pkgdef, opticdir)

    return pkgdef
Example #9
    def test_msgpack_surrogates(self):
        bads = '\u01cb\ufffd\ud842\ufffd\u0012'
        obyts = s_msgpack.en(bads)
        self.isinstance(obyts, bytes)

        outs = s_msgpack.un(obyts)
        self.eq(outs, bads)

        with self.getTestDir() as fdir:
            fd = s_common.genfile(fdir, 'test.mpk')
            fd.write(obyts)
            fd.close()

            fd = s_common.genfile(fdir, 'test.mpk')
            gen = s_msgpack.iterfd(fd)

            items = [obj for obj in gen]
            self.len(1, items)
            self.eq(outs, bads)

            fd.close()

        unpk = s_msgpack.Unpk()
        ret = unpk.feed(obyts)
        self.len(1, ret)
        self.eq([(13, bads)], ret)
Example #10
    async def test_tools_autodoc_stormtypes(self):
        with self.getTestDir() as path:

            argv = ['--savedir', path, '--doc-stormtypes']
            outp = self.getTestOutp()
            self.eq(await s_autodoc.main(argv, outp=outp), 0)

            with s_common.genfile(path, 'stormtypes_libs.rst') as fd:
                libbuf = fd.read()
            libtext = libbuf.decode()

            self.isin(
                '.. _stormlibs-lib-print:\n\n$lib.print(mesg, \\*\\*kwargs)\n============================',
                libtext)
            self.isin('Print a message to the runtime.', libtext)
            self.isin(
                '\\*\\*kwargs (any): Keyword arguments to substitute into the mesg.',
                libtext)
            self.isin(
                '.. _stormlibs-lib-time:\n\n*********\n$lib.time\n*********',
                libtext)
            self.isin('A Storm Library for interacting with timestamps.',
                      libtext)

            with s_common.genfile(path, 'stormtypes_prims.rst') as fd:
                primbuf = fd.read()
            primstext = primbuf.decode()
            self.isin(
                '.. _stormprims-storm-auth-user:\n\n*****************\nstorm\\:auth\\:user\n*****************',
                primstext)
            self.isin('iden\n====\n\nThe User iden.', primstext)
    async def test_csvtool_missingvals(self):

        async with self.getTestCore() as core:

            url = core.getLocalUrl()

            dirn = s_common.gendir(core.dirn, 'junk')

            logpath = s_common.genpath(dirn, 'csvtest.log')

            csvpath = s_common.genpath(dirn, 'csvtest.csv')
            with s_common.genfile(csvpath) as fd:
                fd.write(csvfile_missing)

            stormpath = s_common.genpath(dirn, 'csvtest.storm')
            with s_common.genfile(stormpath) as fd:
                fd.write(csvstorm_missing)

            argv = [
                '--csv-header', '--debug', '--cortex', url, '--logfile',
                logpath, stormpath, csvpath
            ]
            outp = self.getTestOutp()

            await s_csvtool.main(argv, outp=outp)
            outp.expect('hello hello')
            outp.expect("'fqdn': 'google.com'")
            outp.expect('3 nodes')
Example #14
    async def test_csvtool_cli(self):
        with self.getTestDir() as dirn:

            logpath = s_common.genpath(dirn, 'csvtest.log')

            csvpath = s_common.genpath(dirn, 'csvtest.csv')
            with s_common.genfile(csvpath) as fd:
                fd.write(csvfile)

            stormpath = s_common.genpath(dirn, 'csvtest.storm')
            with s_common.genfile(stormpath) as fd:
                fd.write(csvstorm)

            argv = [
                '--csv-header', '--debug', '--cli', '--test', '--logfile',
                logpath, stormpath, csvpath
            ]
            outp = self.getTestOutp()

            cmdg = s_t_utils.CmdGenerator(['storm --hide-props inet:fqdn'],
                                          on_end=EOFError)
            with mock.patch('synapse.lib.cli.get_input', cmdg):
                await s_coro.executor(s_csvtool.main, argv, outp=outp)

            outp.expect('inet:fqdn=google.com')
            outp.expect('2 nodes (9 created)')
    async def test_tools_autodoc_docmodel(self):

        with self.getTestDir() as path:

            argv = ['--doc-model', '--savedir', path]

            outp = self.getTestOutp()
            self.eq(await s_autodoc.main(argv, outp=outp), 0)

            with s_common.genfile(path, 'datamodel_types.rst') as fd:
                buf = fd.read()

            s = buf.decode()
            self.isin('Base types are defined via Python classes.', s)
            self.isin('synapse.models.inet.Addr', s)
            self.isin('Regular types are derived from BaseTypes.', s)
            self.isin(r'inet\:server', s)

            with s_common.genfile(path, 'datamodel_forms.rst') as fd:
                buf = fd.read()

            s = buf.decode()
            self.isin('Forms are derived from types, or base types. Forms represent node types in the graph.', s)
            self.isin(r'inet\:ipv4', s)
            self.isin('Universal props are system level properties which may be present on every node.', s)
            self.isin('.created', s)
Example #17
    async def test_csvtool(self):

        async with self.getTestDmon(mirror='dmoncore') as dmon:

            url = self.getTestUrl(dmon, 'core')

            with self.getTestDir() as dirn:

                logpath = s_common.genpath(dirn, 'csvtest.log')

                csvpath = s_common.genpath(dirn, 'csvtest.csv')
                with s_common.genfile(csvpath) as fd:
                    fd.write(csvfile)

                stormpath = s_common.genpath(dirn, 'csvtest.storm')
                with s_common.genfile(stormpath) as fd:
                    fd.write(csvstorm)

                podes = []

                argv = [
                    '--csv-header', '--debug', '--cortex', url, '--logfile',
                    logpath, stormpath, csvpath
                ]
                outp = self.getTestOutp()

                await s_coro.executor(s_csvtool.main, argv, outp=outp)

                outp.expect('2 nodes (9 created)')
            def pushfile():

                with self.getTestDir() as dirn:

                    nullpath = os.path.join(dirn, 'null.txt')
                    visipath = os.path.join(dirn, 'visi.txt')

                    with s_common.genfile(visipath) as fd:
                        fd.write(b'visi')

                    with self.getTestProxy(dmon, 'axon00') as axon:
                        self.len(1, axon.wants([visihash]))

                    outp = self.getTestOutp()
                    args = ['-a', axonurl,
                            '-c', coreurl,
                            '-t', 'foo.bar,baz.faz',
                            visipath]

                    self.eq(0, s_pushfile.main(args, outp))
                    self.true(outp.expect('Uploaded [visi.txt] to axon'))
                    self.true(outp.expect('file: visi.txt (4) added to core'))

                    with self.getTestProxy(dmon, 'axon00') as axon:
                        self.len(0, axon.wants([visihash]))
                        self.eq(b'visi', b''.join([buf for buf in axon.get(visihash)]))

                    outp = self.getTestOutp()
                    self.eq(0, s_pushfile.main(args, outp))
                    self.true(outp.expect('Axon already had [visi.txt]'))

                    with self.getTestProxy(dmon, 'core', user='******', passwd='root') as core:
                        self.len(1, core.eval(f'file:bytes={s_common.ehex(visihash)}'))
                        self.len(1, core.eval('file:bytes:size=4'))
                        self.len(1, core.eval('#foo.bar'))
                        self.len(1, core.eval('#baz.faz'))

                    # Ensure user can't push a non-existent file and that it won't exist
                    args = ['-a', axonurl, nullpath]
                    self.raises(s_exc.NoSuchFile, s_pushfile.main, args, outp=outp)

                    with self.getTestProxy(dmon, 'axon00') as axon:
                        self.len(1, axon.wants([nullhash]))

                    with s_common.genfile(nullpath) as fd:
                        fd.write(b'')

                    outp = self.getTestOutp()
                    args = ['-a', axonurl,
                            '-c', coreurl,
                            '-t', 'empty',
                            nullpath]

                    self.eq(0, s_pushfile.main(args, outp))

                    with self.getTestProxy(dmon, 'axon00') as axon:
                        self.len(0, axon.wants([nullhash]))
                        self.eq(b'', b''.join([buf for buf in axon.get(nullhash)]))
    def checkTypes(self, enfunc):
        # This is a future-proofing test for msgpack to ensure that a previously packed byte sequence keeps decoding to the expected Python values.
        buf = b'\x92\xa4hehe\x85\xa3str\xa41234\xa3int\xcd\x04\xd2\xa5float\xcb@(\xae\x14z\xe1G\xae\xa3bin\xc4\x041234\xa9realworld\xac\xc7\x8b\xef\xbf\xbd\xed\xa1\x82\xef\xbf\xbd\x12'
        struct = (
            'hehe',
            {
                'str': '1234',
                'int': 1234,
                'float': 12.34,
                'bin': b'1234',
                'realworld': '\u01cb\ufffd\ud842\ufffd\u0012'
            }
        )
        unode = s_msgpack.un(buf)
        self.eq(unode, struct)

        # Ensure our use of msgpack.Unpacker can also handle this data
        with self.getTestDir() as dirn:
            with s_common.genfile(dirn, 'test.mpk') as fd:
                fd.write(buf)
            with s_common.genfile(dirn, 'test.mpk') as fd:
                genr = s_msgpack.iterfd(fd)
                objs = list(genr)
                self.len(1, objs)
                self.eq(objs[0], struct)

        # Ensure that our streaming Unpk object can also handle this data
        unpk = s_msgpack.Unpk()
        objs = unpk.feed(buf)
        self.len(1, objs)
        self.eq(objs[0], (71, struct))

        # Generic isok helper
        self.true(s_msgpack.isok(1))
        self.true(s_msgpack.isok('1'))
        self.true(s_msgpack.isok(1.1))
        self.true(s_msgpack.isok(b'1'))
        self.true(s_msgpack.isok(None))
        self.true(s_msgpack.isok(True))
        self.true(s_msgpack.isok(False))
        self.true(s_msgpack.isok([1]))
        self.true(s_msgpack.isok((1,)))
        self.true(s_msgpack.isok({1: 1}))
        # types that msgpack cannot pack
        self.false(s_msgpack.isok({1, 2}))  # set
        self.false(s_msgpack.isok(print))  # function

        buf2 = b'\x81\xc0\xcd\x04\xd2'
        struct2 = {
            None: 1234
        }
        ustruct2 = s_msgpack.un(buf2)
        self.eq(ustruct2, struct2)
        pbuf2 = enfunc(ustruct2)
        self.eq(buf2, pbuf2)
                        def pushfile():

                            with self.getTestDir() as dirn:

                                nullpath = os.path.join(dirn, 'null.txt')
                                visipath = os.path.join(dirn, 'visi.txt')

                                with s_common.genfile(visipath) as fd:
                                    fd.write(b'visi')

                                self.len(1, axonprox.wants([visihash]))

                                outp = self.getTestOutp()
                                args = ['-a', axonurl,
                                        '-c', coreurl,
                                        '-t', 'foo.bar,baz.faz',
                                        visipath]

                                self.eq(0, s_pushfile.main(args, outp))
                                self.true(outp.expect('Uploaded [visi.txt] to axon'))
                                self.true(outp.expect('file: visi.txt (4) added to core'))

                                self.len(0, axonprox.wants([visihash]))
                                self.eq(b'visi', b''.join([buf for buf in axonprox.get(visihash)]))

                                outp = self.getTestOutp()
                                self.eq(0, s_pushfile.main(args, outp))
                                self.true(outp.expect('Axon already had [visi.txt]'))

                                self.len(1, coreprox.eval(f'file:bytes={s_common.ehex(visihash)}'))
                                self.len(1, coreprox.eval('file:bytes:size=4'))
                                self.len(1, coreprox.eval('#foo.bar'))
                                self.len(1, coreprox.eval('#baz.faz'))

                                # Ensure user can't push a non-existent file and that it won't exist
                                args = ['-a', axonurl, nullpath]
                                self.raises(s_exc.NoSuchFile, s_pushfile.main, args, outp=outp)

                                self.len(1, axonprox.wants([nullhash]))

                                with s_common.genfile(nullpath) as fd:
                                    fd.write(b'')

                                outp = self.getTestOutp()
                                args = ['-a', axonurl,
                                        '-c', coreurl,
                                        '-t', 'empty',
                                        nullpath]

                                self.eq(0, s_pushfile.main(args, outp))

                                self.len(0, axonprox.wants([nullhash]))
                                self.eq(b'', b''.join([buf for buf in axonprox.get(nullhash)]))
                            return 1
Example #21
    def test_fmt_lines(self):
        with self.getTestDir() as dirn:
            linep = s_common.genpath(dirn, 'woot.txt')
            with s_common.genfile(linep) as fd:
                fd.write(testlines)

            with s_common.genfile(linep) as fd:
                lines = list(
                    s_encoding.iterdata(fd, close_fd=False, format='lines'))
                self.len(2, lines)
                e = ['foo.com', 'bar.com']
                self.eq(lines, e)
Example #22
    def test_fmt_mpk(self):
        with self.getTestDir() as dirn:
            fp = s_common.genpath(dirn, 'woot.mpk')
            with s_common.genfile(fp) as fd:
                fd.write(s_msgpack.en('foo.com'))
                fd.write(s_msgpack.en('bar.com'))

            with s_common.genfile(fp) as fd:
                lines = list(
                    s_encoding.iterdata(fd, close_fd=False, format='mpk'))
                self.len(2, lines)
                e = ['foo.com', 'bar.com']
                self.eq(lines, e)
Example #23
    def test_fmt_xml(self):
        with self.getTestDir() as dirn:
            xmlp = s_common.genpath(dirn, 'woot.xml')
            with s_common.genfile(xmlp) as fd:
                fd.write(testxml)

            with s_common.genfile(xmlp) as fd:
                lines = list(
                    s_encoding.iterdata(fd, close_fd=False, format='xml'))
                self.len(1, lines)
                line = lines[0]
                elem = line.get('data')
                self.len(3, list(elem))
Example #24
    async def test_config_base(self):

        confdefs = (
            ('foo', 20, int),
        )

        conf = s_config.Config(confdefs)

        with self.raises(s_exc.NoSuchName):
            await conf.set('hehe', 'haha')

        await conf.loadConfDict({'foo': 30})

        self.eq(conf.get('foo'), 30)

        with self.getTestDir() as dirn:

            path = os.path.join(dirn, 'foo.yaml')
            with s_common.genfile(path) as fd:
                fd.write(b'foo: 8080')

            await conf.loadConfYaml(path)

        self.eq(conf.get('foo'), 8080)

        with self.setTstEnvars(SYN_CONF_TEST_FOO='31337'):

            await conf.loadConfEnvs('SYN_CONF_TEST')

            self.eq(conf.get('foo'), 31337)

            info = dict(iter(conf))
            self.eq(31337, info.get('foo'))
Example #25
    async def _initCellHttp(self):

        self.httpds = []
        self.sessstor = s_lmdbslab.GuidStor(self.slab, 'http:sess')

        async def fini():
            for http in self.httpds:
                http.stop()

        self.onfini(fini)

        # Generate/Load a Cookie Secret
        secpath = os.path.join(self.dirn, 'cookie.secret')
        if not os.path.isfile(secpath):
            with s_common.genfile(secpath) as fd:
                fd.write(s_common.guid().encode('utf8'))

        with s_common.getfile(secpath) as fd:
            secret = fd.read().decode('utf8')

        opts = {
            'cookie_secret': secret,
            'websocket_ping_interval': 10
        }

        self.wapp = t_web.Application(**opts)
        self._initCellHttpApis()
Example #26
 def openLogFd(self, opts):
     opath = self.locs.get('log:fp')
     if opath:
         self.printf('Must call --off to disable current file before starting a new file.')
         return
     fmt = opts.format
     path = opts.path
     nodes_only = opts.nodes_only
     splice_only = opts.splices_only
     if not path:
         ts = s_time.repr(s_common.now(), True)
         fn = f'storm_{ts}.{fmt}'
         path = s_common.getSynPath('stormlogs', fn)
     self.printf(f'Starting logfile at [{path}]')
     q = queue.Queue()
     fd = s_common.genfile(path)
     # Seek to the end of the file. Allows a user to append to a file.
     fd.seek(0, 2)
     self.locs['log:fp'] = path
     self.locs['log:fd'] = fd
     self.locs['log:fmt'] = fmt
     self.locs['log:queue'] = q
     self.locs['log:thr'] = self.queueLoop()
     self.locs['log:nodesonly'] = nodes_only
     self.locs['log:splicesonly'] = splice_only
     self._cmd_cli.on('storm:mesg', self.onStormMesg)
Example #27
 def saveCertPem(self, cert, path):
     '''
     Save a certificate in PEM format to a file outside the certdir.
     '''
     with s_common.genfile(path) as fd:
         fd.truncate(0)
         fd.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
Example #28
    def _genPkeyCsr(self, name, mode, outp=None):
        pkey = crypto.PKey()
        pkey.generate_key(crypto.TYPE_RSA, self.crypto_numbits)

        xcsr = crypto.X509Req()
        xcsr.get_subject().CN = name

        xcsr.set_pubkey(pkey)
        xcsr.sign(pkey, self.signing_digest)

        keypath = self._savePkeyTo(pkey, mode, '%s.key' % name)
        if outp is not None:
            outp.printf('key saved: %s' % (keypath, ))

        csrpath = self._getPathJoin(mode, '%s.csr' % name)
        self._checkDupFile(csrpath)

        byts = crypto.dump_certificate_request(crypto.FILETYPE_PEM, xcsr)

        with s_common.genfile(csrpath) as fd:
            fd.truncate(0)
            fd.write(byts)

        if outp is not None:
            outp.printf('csr saved: %s' % (csrpath, ))

        return byts
Example #29
 def savePkeyPem(self, pkey, path):
     '''
     Save a private key in PEM format to a file outside the certdir.
     '''
     with s_common.genfile(path) as fd:
         fd.truncate(0)
         fd.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
    async def test_synnodes_remote(self):

        async with self.getTestCore() as core:

            await self.addCreatorDeleterRoles(core)

            host, port = await core.dmon.listen('tcp://127.0.0.1:0/')

            curl = f'tcp://*****:*****@{host}:{port}/'

            with self.getTestDir() as dirn:

                jsonlfp = s_common.genpath(dirn, 'podes.jsonl')
                with s_common.genfile(jsonlfp) as fd:
                    for i in range(20):
                        pode = (('test:int', i), {})
                        _ = fd.write(json.dumps(pode).encode() + b'\n')

                argv = [
                    '--cortex', curl, '--format', 'syn.nodes', '--modules',
                    'synapse.tests.utils.TestModule', '--chunksize', '3',
                    jsonlfp
                ]

                outp = self.getTestOutp()
                self.eq(await s_feed.main(argv, outp=outp), 0)

            nodes = await core.eval('test:int').list()
            self.len(20, nodes)
Example #31
 def openLogFd(self, opts):
     opath = self.locs.get('log:fp')
     if opath:
         self.printf(
             'Must call --off to disable current file before starting a new file.'
         )
         return
     fmt = opts.format
     path = opts.path
     nodes_only = opts.nodes_only
     edits_only = opts.edits_only
     if not path:
         ts = s_time.repr(s_common.now(), True)
         fn = f'storm_{ts}.{fmt}'
         path = s_common.getSynPath('stormlogs', fn)
     self.printf(f'Starting logfile at [{path}]')
     q = queue.Queue()
     fd = s_common.genfile(path)
     # Seek to the end of the file. Allows a user to append to a file.
     fd.seek(0, 2)
     self.locs['log:fp'] = path
     self.locs['log:fd'] = fd
     self.locs['log:fmt'] = fmt
     self.locs['log:queue'] = q
     self.locs['log:thr'] = self.queueLoop()
     self.locs['log:nodesonly'] = nodes_only
     self.locs['log:editsonly'] = edits_only
     self._cmd_cli.on('storm:mesg', self.onStormMesg)
Example #32
    async def runCmdOpts(self, opts):

        self.printf('exporting nodes')

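        # Build scrub options that control which tags are included on the exported nodes.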
        queryopts = {}
        if opts.include_tags:
            queryopts['scrub'] = {'include': {'tags': opts.include_tags}}

        if opts.no_tags:
            queryopts['scrub'] = {'include': {'tags': []}}

        try:
            query = opts.query[1:-1]
            with s_common.genfile(opts.filepath) as fd:
                cnt = 0
                async for pode in self._cmd_cli.item.exportStorm(
                        query, opts=queryopts):
                    byts = fd.write(s_msgpack.en(pode))
                    cnt += 1

            self.printf(f'saved {cnt} nodes to: {opts.filepath}')

        except asyncio.CancelledError as e:
            raise

        except s_exc.SynErr as e:
            self.printf(e.errinfo.get('mesg', str(e)))
    async def test_synsplice_remote(self):

        async with self.getTestCore() as core:

            await self.addCreatorDeleterRoles(core)

            host, port = await core.dmon.listen('tcp://127.0.0.1:0/')

            curl = f'tcp://*****:*****@{host}:{port}/'

            mesg = ('node:add', {'ndef': ('test:str', 'foo')})
            splicefp = s_common.genpath(core.dirn, 'splice.mpk')
            with s_common.genfile(splicefp) as fd:
                fd.write(s_msgpack.en(mesg))

            argv = [
                '--cortex', curl, '--format', 'syn.splice', '--modules',
                'synapse.tests.utils.TestModule', splicefp
            ]

            outp = self.getTestOutp()
            self.eq(await s_feed.main(argv, outp=outp), 0)

            nodes = await core.eval('test:str=foo').list()
            self.len(1, nodes)
Example #34
    async def _handle_get(self, core, opts):
        path = self.parsepath(opts.path)

        valu = await core.getHiveKey(path)
        if valu is None:
            self.printf(f'{opts.path} not present')
            return

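        # rend holds the bytes written out with --file; prend is the printable form used below.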
        if opts.json:
            prend = json.dumps(valu, indent=4, sort_keys=True)
            rend = prend.encode()
        elif isinstance(valu, str):
            rend = valu.encode()
            prend = valu
        elif isinstance(valu, bytes):
            rend = valu
            prend = pprint.pformat(valu)
        else:
            rend = json.dumps(valu, indent=4, sort_keys=True).encode()
            prend = pprint.pformat(valu)

        if opts.file:
            with s_common.genfile(opts.file) as fd:
                fd.truncate(0)
                fd.write(rend)
            self.printf(f'Saved the hive entry [{opts.path}] to {opts.file}')
            return

        self.printf(f'{opts.path}:\n{prend}')
Example #35
    async def test_config_base(self):

        confdefs = (('foo', 20, int), )

        conf = s_config.Config(confdefs)

        with self.raises(s_exc.NoSuchName):
            await conf.set('hehe', 'haha')

        await conf.loadConfDict({'foo': 30})

        self.eq(conf.get('foo'), 30)

        with self.getTestDir() as dirn:

            path = os.path.join(dirn, 'foo.yaml')
            with s_common.genfile(path) as fd:
                fd.write(b'foo: 8080')

            await conf.loadConfYaml(path)

        self.eq(conf.get('foo'), 8080)

        with self.setTstEnvars(SYN_CONF_TEST_FOO='31337'):

            await conf.loadConfEnvs('SYN_CONF_TEST')

            self.eq(conf.get('foo'), 31337)

            info = dict(iter(conf))
            self.eq(31337, info.get('foo'))
Example #36
    def test_common_yaml(self):
        obj = [{'key': 1,
                'key2': [1, 2, 3],
                'key3': True,
                'key4': 'some str',
                'key5': {
                    'oh': 'my',
                    'we all': 'float down here'
                }, },
               'duck',
               False,
               'zero',
               0.1,
               ]
        with self.getTestDir() as dirn:
            s_common.yamlsave(obj, dirn, 'test.yaml')
            robj = s_common.yamlload(dirn, 'test.yaml')

            self.eq(obj, robj)

            obj = {'foo': 'bar', 'zap': [3, 4, 'f']}
            s_common.yamlsave(obj, dirn, 'test.yaml')
            s_common.yamlmod({'bar': 42}, dirn, 'test.yaml')
            robj = s_common.yamlload(dirn, 'test.yaml')
            obj['bar'] = 42
            self.eq(obj, robj)

            # Test yaml helper safety
            s = '!!python/object/apply:os.system ["pwd"]'
            with s_common.genfile(dirn, 'explode.yaml') as fd:
                fd.write(s.encode())
            self.raises(yaml.YAMLError, s_common.yamlload, dirn, 'explode.yaml')
Example #37
    async def test_tools_autodoc_stormpkg(self):

        with self.getTestDir() as path:

            ymlpath = s_t_files.getAssetPath('stormpkg/testpkg.yaml')

            argv = ['--savedir', path, '--doc-stormpkg', ymlpath]

            outp = self.getTestOutp()
            self.eq(await s_autodoc.main(argv, outp=outp), 0)

            with s_common.genfile(path, 'stormpkg_testpkg.rst') as fd:
                buf = fd.read()
            s = buf.decode()

            self.isin('Storm Package\\: testpkg', s)
            self.isin(
                'This documentation is generated for version 0.0.1 of the package.',
                s)
            self.isin('This package implements the following Storm Commands.',
                      s)
            self.isin('.. _stormcmd-testpkg-testpkgcmd', s)

            self.isin('testpkgcmd does some stuff', s)
            self.isin('Help on foo opt', s)
            self.isin('Help on bar opt', s)

            self.isin('forms as input nodes', s)
            self.isin('``test:str``', s)
            self.isin('nodes in the graph', s)
            self.isin('``test:int``', s)
            self.isin('nodedata with the following keys', s)
            self.isin('``testnd`` on ``inet:ipv4``', s)
Example #38
    async def test_tools_autodoc_stormsvc(self):

        with self.getTestDir() as path:

            argv = [
                '--savedir', path, '--doc-storm',
                'synapse.tests.test_lib_stormsvc.StormvarServiceCell'
            ]

            outp = self.getTestOutp()
            self.eq(await s_autodoc.main(argv, outp=outp), 0)

            with s_common.genfile(path,
                                  'stormsvc_stormvarservicecell.rst') as fd:
                buf = fd.read()
            s = buf.decode()

            self.isin('StormvarServiceCell Storm Service', s)
            self.isin(
                'This documentation is generated for version 0.0.1 of the service.',
                s)
            self.isin('Storm Package\\: stormvar', s)
            self.isin('.. _stormcmd-stormvar-magic:\n', s)
            self.isin('magic\n-----', s)
            self.isin('Test stormvar support', s)
            self.isin('forms as input nodes', s)
            self.isin('``test:str``', s)
            self.isin('nodes in the graph', s)
            self.isin('``test:comp``', s)
            self.isin('nodedata with the following keys', s)
            self.isin('``foo`` on ``inet:ipv4``', s)
Example #39
    def _saveCertTo(self, cert, *paths):
        path = self._getPathJoin(*paths)
        self._checkDupFile(path)

        with s_common.genfile(path) as fd:
            fd.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))

        return path
Example #40
    def _savePkeyTo(self, pkey, *paths):
        path = self._getPathJoin(*paths)
        self._checkDupFile(path)

        with s_common.genfile(path) as fd:
            fd.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))

        return path
Example #41
    def _saveP12To(self, cert, *paths):
        path = self._getPathJoin(*paths)
        self._checkDupFile(path)

        with s_common.genfile(path) as fd:
            fd.write(cert.export())

        return path
Example #42
    def test_msgpack_iterfd(self):
        t0 = ('5678', {'key': 1})
        t1 = ('1234', {'key': 'haha'})

        with self.getTestDir() as fdir:
            fd = s_common.genfile(fdir, 'test.mpk')
            for obj in (t0, t1):
                fd.write(s_msgpack.en(obj))
            fd.close()

            fd = s_common.genfile(fdir, 'test.mpk')
            gen = s_msgpack.iterfd(fd)

            items = [obj for obj in gen]
            self.len(2, items)
            self.sorteq(items, [t0, t1])

            fd.close()
Example #43
    def test_msgpack_loadfile(self):
        t0 = ('5678', {'key': 1})
        t1 = ('1234', {'key': 'haha'})

        with self.getTestDir() as fdir:
            fd = s_common.genfile(fdir, 'oneobj.mpk')
            fd.write(s_msgpack.en(t0))
            fd.close()

            fd = s_common.genfile(fdir, 'twoobjs.mpk')
            for obj in (t0, t1):
                fd.write(s_msgpack.en(obj))
            fd.close()

            data = s_msgpack.loadfile(s_common.genpath(fdir, 'oneobj.mpk'))
            self.eq(data, ('5678', {'key': 1}))

            # Files containing multiple objects are not supported
            self.raises(msgpack.exceptions.ExtraData, s_msgpack.loadfile, s_common.genpath(fdir, 'twoobjs.mpk'))
    async def test_csvtool_local(self):

        with self.getTestDir() as dirn:

            logpath = s_common.genpath(dirn, 'csvtest.log')

            csvpath = s_common.genpath(dirn, 'csvtest.csv')
            with s_common.genfile(csvpath) as fd:
                fd.write(csvfile)

            stormpath = s_common.genpath(dirn, 'csvtest.storm')
            with s_common.genfile(stormpath) as fd:
                fd.write(csvstorm)

            argv = ['--csv-header', '--debug', '--test', '--logfile', logpath, stormpath, csvpath]
            outp = self.getTestOutp()

            await s_csvtool.main(argv, outp=outp)
            outp.expect('2 nodes (9 created)')
Example #45
    def test_certdir_importfile(self):
        with self.getCertDir() as cdir:  # type: s_certdir.CertDir
            with self.getTestDir() as testpath:

                # File doesn't exist
                fpath = s_common.genpath(testpath, 'not_real.crt')
                self.raises(s_exc.NoSuchFile, cdir.importFile, fpath, 'cas')

                # File has unsupported extension
                fpath = s_common.genpath(testpath, 'coolpic.bmp')
                with s_common.genfile(fpath) as fd:
                    self.raises(s_exc.BadFileExt, cdir.importFile, fpath, 'cas')

                tests = (
                    ('cas', 'coolca.crt'),
                    ('cas', 'coolca.key'),
                    ('hosts', 'coolhost.crt'),
                    ('hosts', 'coolhost.key'),
                    ('users', 'cooluser.crt'),
                    ('users', 'cooluser.key'),
                    ('users', 'cooluser.p12'),
                )
                for ftype, fname in tests:
                    srcpath = s_common.genpath(testpath, fname)
                    dstpath = s_common.genpath(cdir.path, ftype, fname)

                    with s_common.genfile(srcpath) as fd:
                        fd.write(b'arbitrary data')
                        fd.seek(0)

                        # Make sure the file is not there
                        self.raises(s_exc.NoSuchFile, s_common.reqfile, dstpath)

                        # Import it and make sure it exists
                        self.none(cdir.importFile(srcpath, ftype))
                        with s_common.reqfile(dstpath) as dstfd:
                            self.eq(dstfd.read(), b'arbitrary data')

                        # Make sure it can't be overwritten
                        self.raises(s_exc.FileExists, cdir.importFile, srcpath, ftype)
    def test_easycert_importfile(self):
        with self.getTestDir() as tstpath:

            outp = self.getTestOutp()
            fname = 'coolfile.crt'
            srcpath = s_common.genpath(tstpath, fname)
            ftype = 'cas'
            argv = ['--importfile', ftype, '--certdir', tstpath, srcpath]
            with s_common.genfile(srcpath) as fd:
                self.eq(s_easycert.main(argv, outp=outp), 0)

            outp = self.getTestOutp()
            fname = '*****@*****.**'
            srcpath = s_common.genpath(tstpath, fname)
            ftype = 'cas'
            argv = ['--importfile', ftype, '--certdir', tstpath, srcpath]
            with s_common.genfile(srcpath) as fd:
                self.eq(s_easycert.main(argv, outp=outp), 0)

            outp = self.getTestOutp()
            argv = ['--importfile', 'cas', '--certdir', tstpath, 'nope']
            self.raises(s_exc.NoSuchFile, s_easycert.main, argv, outp=outp)
    async def test_csvtool(self):

        async with self.getTestCore() as core:

            url = core.getLocalUrl()

            dirn = s_common.gendir(core.dirn, 'junk')

            logpath = s_common.genpath(dirn, 'csvtest.log')

            csvpath = s_common.genpath(dirn, 'csvtest.csv')
            with s_common.genfile(csvpath) as fd:
                fd.write(csvfile)

            stormpath = s_common.genpath(dirn, 'csvtest.storm')
            with s_common.genfile(stormpath) as fd:
                fd.write(csvstorm)

            argv = ['--csv-header', '--debug', '--cortex', url, '--logfile', logpath, stormpath, csvpath]
            outp = self.getTestOutp()

            await s_csvtool.main(argv, outp=outp)

            outp.expect('2 nodes (9 created)')
    async def test_csvtool_cli(self):

        with self.getTestDir() as dirn:

            logpath = s_common.genpath(dirn, 'csvtest.log')

            csvpath = s_common.genpath(dirn, 'csvtest.csv')
            with s_common.genfile(csvpath) as fd:
                fd.write(csvfile)

            stormpath = s_common.genpath(dirn, 'csvtest.storm')
            with s_common.genfile(stormpath) as fd:
                fd.write(csvstorm)

            argv = ['--csv-header', '--debug', '--cli', '--test', '--logfile', logpath, stormpath, csvpath]
            outp = self.getTestOutp()

            cmdg = s_t_utils.CmdGenerator(['storm --hide-props inet:fqdn', EOFError()])

            with self.withTestCmdr(cmdg):
                await s_csvtool.main(argv, outp=outp)

            outp.expect('inet:fqdn=google.com')
            outp.expect('2 nodes (9 created)')
Example #49
    async def _initCellHttp(self):

        self.httpds = []
        self.sessstor = s_lmdbslab.GuidStor(self.slab, 'http:sess')

        async def fini():
            for http in self.httpds:
                http.stop()

        self.onfini(fini)

        # Generate/Load a Cookie Secret
        secpath = os.path.join(self.dirn, 'cookie.secret')
        if not os.path.isfile(secpath):
            with s_common.genfile(secpath) as fd:
                fd.write(s_common.guid().encode('utf8'))

        with s_common.getfile(secpath) as fd:
            secret = fd.read().decode('utf8')

        opts = {
            'cookie_secret': secret,
            'websocket_ping_interval': 10
        }

        self.wapp = t_web.Application(**opts)

        self.addHttpApi('/api/v1/login', s_httpapi.LoginV1, {'cell': self})

        self.addHttpApi('/api/v1/auth/users', s_httpapi.AuthUsersV1, {'cell': self})
        self.addHttpApi('/api/v1/auth/roles', s_httpapi.AuthRolesV1, {'cell': self})

        self.addHttpApi('/api/v1/auth/adduser', s_httpapi.AuthAddUserV1, {'cell': self})
        self.addHttpApi('/api/v1/auth/addrole', s_httpapi.AuthAddRoleV1, {'cell': self})

        self.addHttpApi('/api/v1/auth/delrole', s_httpapi.AuthDelRoleV1, {'cell': self})

        self.addHttpApi('/api/v1/auth/user/(.*)', s_httpapi.AuthUserV1, {'cell': self})
        self.addHttpApi('/api/v1/auth/role/(.*)', s_httpapi.AuthRoleV1, {'cell': self})

        self.addHttpApi('/api/v1/auth/grant', s_httpapi.AuthGrantV1, {'cell': self})
        self.addHttpApi('/api/v1/auth/revoke', s_httpapi.AuthRevokeV1, {'cell': self})
Example #50
    def _genPkeyCsr(self, name, mode, outp=None):
        pkey = crypto.PKey()
        pkey.generate_key(crypto.TYPE_RSA, self.crypto_numbits)

        xcsr = crypto.X509Req()
        xcsr.get_subject().CN = name

        xcsr.set_pubkey(pkey)
        xcsr.sign(pkey, self.signing_digest)

        keypath = self._savePkeyTo(pkey, mode, '%s.key' % name)
        if outp is not None:
            outp.printf('key saved: %s' % (keypath,))

        csrpath = self._getPathJoin(mode, '%s.csr' % name)
        self._checkDupFile(csrpath)

        with s_common.genfile(csrpath) as fd:
            fd.write(crypto.dump_certificate_request(crypto.FILETYPE_PEM, xcsr))

        if outp is not None:
            outp.printf('csr saved: %s' % (csrpath,))
Example #51
def getDocData(fp, root=None):
    '''
    Get the data for a file from the docdata directory.

    Args:
        fp (str): Name of the file to retrieve the data of.
        root (str): Optional root path to look for a docdata directory in.

    Notes:
        Will detect json/jsonl/yaml/mpk extensions and automatically
        decode that data if found; otherwise it returns bytes.

        Defaults to looking for the ``docdata`` directory in the current
        working directory. This works fine for notebooks nested in the
        docs directory of synapse, but the directory that is searched
        may be overridden by providing an alternative root.

    Returns:
        data: May be deserialized data or bytes.

    Raises:
        ValueError: If the file does not exist or directory traversal is attempted.
    '''
    fpath = getDocPath(fp, root)
    if fpath.endswith('.yaml'):
        return s_common.yamlload(fpath)
    if fpath.endswith('.json'):
        return s_common.jsload(fpath)
    with s_common.genfile(fpath) as fd:
        if fpath.endswith('.mpk'):
            return s_msgpack.un(fd.read())
        if fpath.endswith('.jsonl'):
            recs = []
            for line in fd.readlines():
                recs.append(json.loads(line.decode()))
            return recs
        return fd.read()
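
# A minimal usage sketch for getDocData (illustration only; not part of the original
# module). The layout mirrors test_doc_data above: the 'docdata' directory lives in an
# ancestor of the supplied root. The helper name and paths are hypothetical.
def _docdata_usage_sketch(dirn):
    s_common.gendir(dirn, 'docdata')
    s_common.yamlsave({'key': 'value'}, dirn, 'docdata', 'data.yaml')
    root = s_common.genpath(dirn, 'synapse', 'userguides')
    return getDocData('data.yaml', root)  # returns {'key': 'value'}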
Example #52
async def main(argv, outp=s_output.stdout):

    pars = makeargparser()

    try:
        opts = pars.parse_args(argv)
    except s_exc.ParserExit as e:
        return e.get('status')

    with open(opts.stormfile, 'r', encoding='utf8') as fd:
        text = fd.read()

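    # Read the CSV files (optionally skipping a header row) and yield rows in chunks of 1,000.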
    def iterrows():
        for path in opts.csvfiles:

            with open(path, 'r', encoding='utf8') as fd:

                if opts.csv_header:
                    fd.readline()

                def genr():

                    for row in csv.reader(fd):
                        yield row

                for rows in s_common.chunks(genr(), 1000):
                    yield rows

    rowgenr = iterrows()

    logfd = None
    if opts.logfile is not None:
        logfd = s_common.genfile(opts.logfile)

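    # Run the Storm query once per chunk, passing the chunk in as the $rows variable.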
    async def addCsvData(core):

        newcount, nodecount = 0, 0

        for rows in rowgenr:

            stormopts = {
                'vars': {'rows': rows},
            }

            async for mesg in core.storm(text, opts=stormopts):

                if mesg[0] == 'node:add':
                    newcount += 1

                elif mesg[0] == 'node':
                    nodecount += 1

                elif mesg[0] == 'err' and not opts.debug:
                    outp.printf(repr(mesg))

                if opts.debug:
                    outp.printf(repr(mesg))

                if logfd is not None:
                    byts = json.dumps(mesg).encode('utf8')
                    logfd.write(byts + b'\n')

        if opts.cli:
            await s_cmdr.runItemCmdr(core, outp)

        return newcount, nodecount

    if opts.test:
        async with s_cortex.getTempCortex() as core:
            newcount, nodecount = await addCsvData(core)

    else:
        async with await s_telepath.openurl(opts.cortex) as core:
            newcount, nodecount = await addCsvData(core)

    if logfd is not None:
        logfd.close()

    outp.printf('%d nodes (%d created).' % (nodecount, newcount,))
Example #53
 def saveCertPem(self, cert, path):
     '''
     Save a certificate in PEM format to a file outside the certdir.
     '''
     with s_common.genfile(path) as fd:
         fd.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
Example #54
 def savePkeyPem(self, pkey, path):
     '''
     Save a private key in PEM format to a file outside the certdir.
     '''
     with s_common.genfile(path) as fd:
         fd.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
Example #55
    def test_syntest_helpers(self):
        # Execute all of the test helpers here
        self.len(2, (1, 2))

        self.le(1, 2)
        self.le(1, 1)
        self.lt(1, 2)
        self.ge(2, 1)
        self.ge(1, 1)
        self.gt(2, 1)

        self.isin('foo', ('foo', 'bar'))
        self.isin('foo', 'fooobarr')
        self.isin('foo', {'foo': 'bar'})
        self.isin('foo', {'foo', 'bar'})
        self.isin('foo', ['foo', 'bar'])

        self.notin('baz', ('foo', 'bar'))
        self.notin('baz', 'fooobarr')
        self.notin('baz', {'foo': 'bar'})
        self.notin('baz', {'foo', 'bar'})
        self.notin('baz', ['foo', 'bar'])

        self.isinstance('str', str)
        self.isinstance('str', (str, dict))

        self.sorteq((1, 2, 3), [2, 3, 1])

        def div0():
            return 1 / 0

        self.raises(ZeroDivisionError, div0)

        self.none(None)
        self.none({'foo': 'bar'}.get('baz'))

        self.nn(1)
        self.nn({'foo': 'bar'}.get('baz', 'woah'))

        self.true(True)
        self.true(1)
        self.true(-1)
        self.true('str')

        self.false(False)
        self.false(0)
        self.false('')
        self.false(())
        self.false([])
        self.false({})
        self.false(set())

        self.eq(True, 1)
        self.eq(False, 0)
        self.eq('foo', 'foo')
        self.eq({'1', '2'}, {'2', '1', '2'})
        self.eq({'key': 'val'}, {'key': 'val'})

        self.ne(True, 0)
        self.ne(False, 1)
        self.ne('foo', 'foobar')
        self.ne({'1', '2'}, {'2', '1', '2', '3'})
        self.ne({'key': 'val'}, {'key2': 'val2'})

        self.noprop({'key': 'valu'}, 'foo')

        with self.getTestDir() as fdir:
            self.true(os.path.isdir(fdir))
        self.false(os.path.isdir(fdir))

        # try mirroring an arbitrary directory
        with self.getTestDir() as fdir1:
            with s_common.genfile(fdir1, 'hehe.haha') as fd:
                fd.write('hehe'.encode())
            with self.getTestDir(fdir1) as fdir2:
                with s_common.genfile(fdir2, 'hehe.haha') as fd:
                    self.eq(fd.read(), 'hehe'.encode())

        outp = self.getTestOutp()
        self.isinstance(outp, s_output.OutPut)
Example #56
def main(argv):

    pars = argparse.ArgumentParser(prog='easycert', description=descr)

    pars.add_argument('--certdir', default='~/.syn/certs', help='Directory for certs/keys')
    pars.add_argument('--signas', help='sign the new cert with the given cert name')
    pars.add_argument('--ca', default=False, action='store_true', help='mark the certificate as a CA/CRL signer')
    pars.add_argument('--server', default=False, action='store_true', help='mark the certificate as a server')
    pars.add_argument('name', help='common name for the certificate')

    opts = pars.parse_args(argv)

    certdir = gendir(opts.certdir)

    pkeypath = os.path.join(certdir,'%s.key' % opts.name)
    certpath = os.path.join(certdir,'%s.crt' % opts.name)

    if os.path.exists(pkeypath):
        print('key exists: %s' % (pkeypath,))
        return(-1)

    if os.path.exists(certpath):
        print('cert exists: %s' % (certpath,))
        return(-1)

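    # Generate a new 2048-bit RSA key for the certificate.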
    pkey = crypto.PKey()
    pkey.generate_key(crypto.TYPE_RSA, 2048)

    cert = crypto.X509()
    cert.set_pubkey(pkey)
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(10*365*24*60*60)
    cert.set_serial_number( int(time.time()) )

    if opts.ca:

        ext0 = crypto.X509Extension(b'basicConstraints',False,b'CA:TRUE')
        cert.add_extensions([ext0])

    else:

        keyuse = [b'digitalSignature']
        extuse = [b'clientAuth']
        certtype = b'client'


        if opts.server:
            certtype = b'server'
            extuse = [b'serverAuth']
            keyuse.append(b'keyEncipherment')

        ext0 = crypto.X509Extension(b'nsCertType',False,certtype)
        ext1 = crypto.X509Extension(b'keyUsage',False,b','.join(keyuse))

        extuse = b','.join(extuse)
        ext2 = crypto.X509Extension(b'extendedKeyUsage',False,extuse)
        ext3 = crypto.X509Extension(b'basicConstraints',False,b'CA:FALSE')

        cert.add_extensions([ext0,ext1,ext2,ext3])

    subj = cert.get_subject()
    subj.CN = opts.name

    signcert = cert
    signpkey = pkey

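    # Self-sign by default; with --signas, load that key/cert from the certdir and sign with it instead.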
    if opts.signas:
        path = os.path.join(certdir,'%s.key' % (opts.signas,))
        byts = open(path,'rb').read()
        signpkey = crypto.load_privatekey(crypto.FILETYPE_PEM, byts)

        path = os.path.join(certdir,'%s.crt' % (opts.signas,))
        byts = open(path,'rb').read()
        signcert = crypto.load_certificate(crypto.FILETYPE_PEM, byts)

    cert.set_issuer( signcert.get_subject() )
    cert.sign( signpkey, 'sha1' )

    byts = crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey)

    with genfile(pkeypath) as fd:
        fd.write(byts)

    print('pkey saved: %s' % (pkeypath,))

    byts = crypto.dump_certificate(crypto.FILETYPE_PEM, cert)
    with genfile(certpath) as fd:
        fd.write(byts)

    print('cert saved: %s' % (certpath,))
Example #57
    def test_common_file_helpers(self):
        # genfile
        with self.getTestDir() as testdir:
            fd = s_common.genfile(testdir, 'woot', 'foo.bin')
            fd.write(b'genfile_test')
            fd.close()

            with open(os.path.join(testdir, 'woot', 'foo.bin'), 'rb') as fd:
                buf = fd.read()
            self.eq(buf, b'genfile_test')

        # reqpath
        with self.getTestDir() as testdir:
            with s_common.genfile(testdir, 'test.txt') as fd:
                fd.write(b'')
            self.eq(os.path.join(testdir, 'test.txt'), s_common.reqpath(testdir, 'test.txt'))
            self.raises(s_exc.NoSuchFile, s_common.reqpath, testdir, 'newp')

        # reqfile
        with self.getTestDir() as testdir:
            with s_common.genfile(testdir, 'test.txt') as fd:
                fd.write(b'reqfile_test')
            fd = s_common.reqfile(testdir, 'test.txt')
            buf = fd.read()
            self.eq(buf, b'reqfile_test')
            fd.close()
            self.raises(s_exc.NoSuchFile, s_common.reqfile, testdir, 'newp')

        # getfile
        with self.getTestDir() as testdir:
            with s_common.genfile(testdir, 'test.txt') as fd:
                fd.write(b'getfile_test')
            fd = s_common.getfile(testdir, 'test.txt')
            buf = fd.read()
            self.eq(buf, b'getfile_test')
            fd.close()
            self.none(s_common.getfile(testdir, 'newp'))

        # getbytes
        with self.getTestDir() as testdir:
            with s_common.genfile(testdir, 'test.txt') as fd:
                fd.write(b'getbytes_test')
            buf = s_common.getbytes(testdir, 'test.txt')
            self.eq(buf, b'getbytes_test')
            self.none(s_common.getbytes(testdir, 'newp'))

        # reqbytes
        with self.getTestDir() as testdir:
            with s_common.genfile(testdir, 'test.txt') as fd:
                fd.write(b'reqbytes_test')
            buf = s_common.reqbytes(testdir, 'test.txt')
            self.eq(buf, b'reqbytes_test')
            self.raises(s_exc.NoSuchFile, s_common.reqbytes, testdir, 'newp')

        # listdir
        with self.getTestDir() as dirn:
            path = os.path.join(dirn, 'woot.txt')
            with open(path, 'wb') as fd:
                fd.write(b'woot')

            os.makedirs(os.path.join(dirn, 'nest'))
            with open(os.path.join(dirn, 'nest', 'nope.txt'), 'wb') as fd:
                fd.write(b'nope')

            retn = tuple(s_common.listdir(dirn))
            self.len(2, retn)

            retn = tuple(s_common.listdir(dirn, glob='*.txt'))
            self.eq(retn, ((path,)))