def _getTankIden(self):

    path = s_common.genpath(self.dirn, 'guid')
    if os.path.isfile(path):
        with open(path, 'r') as fd:
            return fd.read().strip()

    # legacy cell code...
    cellpath = s_common.genpath(self.dirn, 'cell.guid')
    if os.path.isfile(cellpath):

        with open(cellpath, 'r') as fd:
            iden = fd.read().strip()

        with open(path, 'w') as fd:
            fd.write(iden)

        os.unlink(cellpath)
        return iden

    iden = s_common.guid()
    with open(path, 'w') as fd:
        fd.write(iden)

    return iden
async def test_tools_genpkg(self):

    ymlpath = s_common.genpath(dirname, 'files', 'stormpkg', 'testpkg.yml')

    async with self.getTestCore() as core:

        savepath = s_common.genpath(core.dirn, 'testpkg.json')

        url = core.getLocalUrl()
        argv = ('--push', url, '--save', savepath, ymlpath)
        await s_genpkg.main(argv)

        await core.callStorm('testcmd')
        await core.callStorm('$lib.import(testmod)')

        pdef = s_common.yamlload(savepath)

        self.eq(pdef['name'], 'testpkg')
        self.eq(pdef['version'], (0, 0, 1))
        self.eq(pdef['modules'][0]['name'], 'testmod')
        self.eq(pdef['modules'][0]['storm'], 'inet:ipv4\n')
        self.eq(pdef['commands'][0]['name'], 'testcmd')
        self.eq(pdef['commands'][0]['storm'], 'inet:ipv6\n')
        self.eq(pdef['optic']['files']['index.html']['file'], 'aGkK')
async def test_csvtool_cli(self):

    with self.getTestDir() as dirn:

        logpath = s_common.genpath(dirn, 'csvtest.log')

        csvpath = s_common.genpath(dirn, 'csvtest.csv')
        with s_common.genfile(csvpath) as fd:
            fd.write(csvfile)

        stormpath = s_common.genpath(dirn, 'csvtest.storm')
        with s_common.genfile(stormpath) as fd:
            fd.write(csvstorm)

        argv = [
            '--csv-header', '--debug', '--cli', '--test',
            '--logfile', logpath, stormpath, csvpath,
        ]
        outp = self.getTestOutp()

        cmdg = s_t_utils.CmdGenerator(['storm --hide-props inet:fqdn'], on_end=EOFError)
        with mock.patch('synapse.lib.cli.get_input', cmdg):
            await s_coro.executor(s_csvtool.main, argv, outp=outp)

        outp.expect('inet:fqdn=google.com')
        outp.expect('2 nodes (9 created)')
def genauth(opts, outp=s_output.stdout):

    authpath = s_common.genpath(opts.authfile)
    savepath = s_common.genpath(opts.savepath)

    if not os.path.isfile(authpath):
        outp.printf('auth file not found: %s' % (authpath, ))
        return

    auth = s_msgpack.loadfile(authpath)

    addr = auth[1].get('neuron')
    if addr is None:
        outp.printf('auth file has no neuron info: %s' % (authpath, ))
        return

    celluser = s_cell.CellUser(auth)

    with celluser.open(addr, timeout=20) as sess:

        nuro = s_neuron.NeuronClient(sess)
        auth = nuro.genCellAuth(opts.cellname, timeout=20)

        s_msgpack.dumpfile(auth, savepath)
        outp.printf('saved %s: %s' % (opts.cellname, savepath))
def test_tools_loadpkgproto_readonly(self):
    self.thisHostMustNot(platform='windows')

    readonly_mode = stat.S_IREAD | stat.S_IRGRP | stat.S_IROTH
    srcpath = s_common.genpath(dirname, 'files', 'stormpkg')

    with self.getTestDir(copyfrom=srcpath) as dirn:
        ymlpath = s_common.genpath(dirn, 'testpkg.yaml')
        self.setDirFileModes(dirn=dirn, mode=readonly_mode)

        self.skipIfWriteableFiles(dirn)

        with self.raises(PermissionError):
            s_genpkg.tryLoadPkgProto(ymlpath)

        pkg = s_genpkg.tryLoadPkgProto(ymlpath, readonly=True)

        self.eq(pkg.get('name'), 'testpkg')
        self.eq(pkg.get('modules')[0].get('storm'), 'inet:ipv4\n')
        self.eq(pkg.get('commands')[0].get('storm'), 'inet:ipv6\n')

    # Missing files are still a problem
    with self.getTestDir(copyfrom=srcpath) as dirn:
        ymlpath = s_common.genpath(dirn, 'testpkg.yaml')
        os.unlink(os.path.join(dirn, 'storm', 'modules', 'testmod'))
        self.setDirFileModes(dirn=dirn, mode=readonly_mode)

        with self.raises(s_exc.NoSuchFile) as cm:
            s_genpkg.tryLoadPkgProto(ymlpath, readonly=True)
        self.isin('storm/modules/testmod', cm.exception.get('path'))

    with self.getTestDir(copyfrom=srcpath) as dirn:
        ymlpath = s_common.genpath(dirn, 'testpkg.yaml')
        os.remove(os.path.join(dirn, 'storm', 'commands', 'testpkgcmd'))
        self.setDirFileModes(dirn=dirn, mode=readonly_mode)

        with self.raises(s_exc.NoSuchFile) as cm:
            s_genpkg.tryLoadPkgProto(ymlpath, readonly=True)
        self.isin('storm/commands/testpkgcmd', cm.exception.get('path'))
def loadPkgProto(path, opticdir=None):

    full = s_common.genpath(path)
    pkgdef = s_common.yamlload(full)

    if isinstance(pkgdef['version'], str):
        pkgdef['version'] = chopSemVer(pkgdef['version'])

    protodir = os.path.dirname(full)

    for mod in pkgdef.get('modules', ()):
        name = mod.get('name')
        with s_common.genfile(protodir, 'storm', 'modules', name) as fd:
            mod['storm'] = fd.read().decode()

    for cmd in pkgdef.get('commands', ()):
        name = cmd.get('name')
        with s_common.genfile(protodir, 'storm', 'commands', name) as fd:
            cmd['storm'] = fd.read().decode()

    if opticdir is None:
        opticdir = s_common.genpath(protodir, 'optic')

    if os.path.isdir(opticdir):
        pkgdef.setdefault('optic', {})
        pkgdef['optic'].setdefault('files', {})
        loadOpticFiles(pkgdef, opticdir)

    return pkgdef
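# Hypothetical usage sketch for loadPkgProto() above (not part of the original
# source): it resolves the prototype yaml, inlines the storm module and command
# files found next to it, and returns the completed package definition.
# The prototype path and output path below are placeholders.
import synapse.common as s_common

pkgdef = loadPkgProto('proto/testpkg.yml')
s_common.jssave(pkgdef, 'testpkg.json')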
def test_doc_data(self):
    with self.getTestDir() as dirn:
        s_common.gendir(dirn, 'docdata', 'stuff')

        docdata = s_common.genpath(dirn, 'docdata')

        root = s_common.genpath(dirn, 'synapse', 'userguides')

        d = {'key': 'value'}

        s_common.jssave(d, docdata, 'data.json')
        s_common.yamlsave(d, docdata, 'data.yaml')
        s_msgpack.dumpfile(d, os.path.join(docdata, 'data.mpk'))
        with s_common.genfile(docdata, 'stuff', 'data.txt') as fd:
            fd.write('beep'.encode())
        with s_common.genfile(docdata, 'data.jsonl') as fd:
            fd.write(json.dumps(d).encode() + b'\n')
            fd.write(json.dumps(d).encode() + b'\n')
            fd.write(json.dumps(d).encode() + b'\n')

        data = s_jupyter.getDocData('data.json', root)
        self.eq(data, d)
        data = s_jupyter.getDocData('data.yaml', root)
        self.eq(data, d)
        data = s_jupyter.getDocData('data.mpk', root)
        self.eq(data, d)
        data = s_jupyter.getDocData('stuff/data.txt', root)
        self.eq(data, b'beep')
        data = s_jupyter.getDocData('data.jsonl', root)
        self.eq(data, [d, d, d])

        self.raises(ValueError, s_jupyter.getDocData, 'newp.bin', root)
        self.raises(ValueError, s_jupyter.getDocData,
                    '../../../../../../etc/passwd', root)
async def test_csvtool_cli(self):

    with self.getTestDir() as dirn:

        logpath = s_common.genpath(dirn, 'csvtest.log')

        csvpath = s_common.genpath(dirn, 'csvtest.csv')
        with s_common.genfile(csvpath) as fd:
            fd.write(csvfile)

        stormpath = s_common.genpath(dirn, 'csvtest.storm')
        with s_common.genfile(stormpath) as fd:
            fd.write(csvstorm)

        argv = [
            '--csv-header', '--debug', '--cli', '--test',
            '--logfile', logpath, stormpath, csvpath,
        ]
        outp = self.getTestOutp()

        cmdg = s_t_utils.CmdGenerator([
            'storm --hide-props inet:fqdn',
            EOFError(),
        ])

        with self.withCliPromptMockExtendOutp(outp):
            with self.withTestCmdr(cmdg):
                await s_csvtool.main(argv, outp=outp)

        outp.expect('inet:fqdn=google.com')
        outp.expect('2 nodes')
async def test_telepath_loadenv(self):
    with self.getTestDir() as dirn:

        certpath = s_common.gendir(dirn, 'certs')
        newppath = s_common.genpath(dirn, 'newps')

        conf = {
            'version': 1,
            'aha:servers': [
                'tcp://localhost:9999/',
            ],
            'certdirs': [
                certpath,
                newppath,
            ],
        }

        path = s_common.genpath(dirn, 'telepath.yaml')
        s_common.yamlsave(conf, path)

        fini = await s_telepath.loadTeleEnv(path)
        await fini()

        self.none(await s_telepath.loadTeleEnv(newppath))

        conf['version'] = 99
        s_common.yamlsave(conf, path)
        self.none(await s_telepath.loadTeleEnv(path))
async def test_csvtool(self):

    async with self.getTestCore() as core:

        url = core.getLocalUrl()

        dirn = s_common.gendir(core.dirn, 'junk')

        logpath = s_common.genpath(dirn, 'csvtest.log')

        csvpath = s_common.genpath(dirn, 'csvtest.csv')
        with s_common.genfile(csvpath) as fd:
            fd.write(csvfile)

        stormpath = s_common.genpath(dirn, 'csvtest.storm')
        with s_common.genfile(stormpath) as fd:
            fd.write(csvstorm)

        argv = [
            '--csv-header', '--debug', '--cortex', url,
            '--logfile', logpath, stormpath, csvpath,
        ]

        outp = self.getTestOutp()
        await s_csvtool.main(argv, outp=outp)
        outp.expect('oh hai')
        outp.expect('2 nodes')

        with mock.patch('synapse.telepath.Proxy._getSynVers', self._getOldSynVers):
            outp = self.getTestOutp()
            await s_csvtool.main(argv, outp=outp)
            outp.expect('Cortex version 0.0.0 is outside of the csvtool supported range')
async def test_csvtool(self):

    async with self.getTestCore() as core:

        url = core.getLocalUrl()

        dirn = s_common.gendir(core.dirn, 'junk')

        logpath = s_common.genpath(dirn, 'csvtest.log')

        csvpath = s_common.genpath(dirn, 'csvtest.csv')
        with s_common.genfile(csvpath) as fd:
            fd.write(csvfile)

        stormpath = s_common.genpath(dirn, 'csvtest.storm')
        with s_common.genfile(stormpath) as fd:
            fd.write(csvstorm)

        argv = [
            '--csv-header', '--debug', '--cortex', url,
            '--logfile', logpath, stormpath, csvpath,
        ]

        outp = self.getTestOutp()
        await s_csvtool.main(argv, outp=outp)
        outp.expect('oh hai')
        outp.expect('2 nodes')
async def test_csvtool(self):

    async with self.getTestDmon(mirror='dmoncore') as dmon:

        url = self.getTestUrl(dmon, 'core')

        with self.getTestDir() as dirn:

            logpath = s_common.genpath(dirn, 'csvtest.log')

            csvpath = s_common.genpath(dirn, 'csvtest.csv')
            with s_common.genfile(csvpath) as fd:
                fd.write(csvfile)

            stormpath = s_common.genpath(dirn, 'csvtest.storm')
            with s_common.genfile(stormpath) as fd:
                fd.write(csvstorm)

            podes = []

            argv = [
                '--csv-header', '--debug', '--cortex', url,
                '--logfile', logpath, stormpath, csvpath,
            ]
            outp = self.getTestOutp()

            await s_coro.executor(s_csvtool.main, argv, outp=outp)

            outp.expect('2 nodes (9 created)')
async def test_csvtool_missingvals(self):

    async with self.getTestCore() as core:

        url = core.getLocalUrl()

        dirn = s_common.gendir(core.dirn, 'junk')

        logpath = s_common.genpath(dirn, 'csvtest.log')

        csvpath = s_common.genpath(dirn, 'csvtest.csv')
        with s_common.genfile(csvpath) as fd:
            fd.write(csvfile_missing)

        stormpath = s_common.genpath(dirn, 'csvtest.storm')
        with s_common.genfile(stormpath) as fd:
            fd.write(csvstorm_missing)

        argv = [
            '--csv-header', '--debug', '--cortex', url,
            '--logfile', logpath, stormpath, csvpath,
        ]
        outp = self.getTestOutp()

        await s_csvtool.main(argv, outp=outp)

        outp.expect('hello hello')
        outp.expect("'fqdn': 'google.com'")
        outp.expect('3 nodes')
async def main(argv, outp=None):

    if outp is None:
        outp = s_output.OutPut()

    pars = makeargparser()
    opts = pars.parse_args(argv)

    if opts.doc_model:

        if opts.cortex:
            async with await s_telepath.openurl(opts.cortex) as core:
                rsttypes, rstforms = await docModel(outp, core)
        else:
            async with s_cortex.getTempCortex() as core:
                rsttypes, rstforms = await docModel(outp, core)

        if opts.savedir:
            with open(s_common.genpath(opts.savedir, 'datamodel_types.rst'), 'wb') as fd:
                fd.write(rsttypes.getRstText().encode())
            with open(s_common.genpath(opts.savedir, 'datamodel_forms.rst'), 'wb') as fd:
                fd.write(rstforms.getRstText().encode())

    return 0
async def test_storm_http_inject_ca(self):

    with self.getTestDir() as dirn:
        cdir = s_common.gendir(dirn, 'certs')
        cadir = s_common.gendir(cdir, 'cas')
        tdir = s_certdir.CertDir(cdir)
        tdir.genCaCert('somelocalca')
        tdir.genHostCert('localhost', signas='somelocalca')

        localkeyfp = tdir.getHostKeyPath('localhost')
        localcertfp = tdir.getHostCertPath('localhost')
        shutil.copyfile(localkeyfp, s_common.genpath(dirn, 'sslkey.pem'))
        shutil.copyfile(localcertfp, s_common.genpath(dirn, 'sslcert.pem'))

        tlscadir = s_common.gendir(dirn, 'cadir')
        for fn in os.listdir(cadir):
            if fn.endswith('.crt'):
                shutil.copyfile(os.path.join(cadir, fn), os.path.join(tlscadir, fn))

        async with self.getTestCore(dirn=dirn) as core:
            root = await core.auth.getUserByName('root')
            await root.setPasswd('root')

            addr, port = await core.addHttpsPort(0)
            core.addHttpApi('/api/v0/test', s_test.HttpReflector, {'cell': core})
            url = f'https://*****:*****@localhost:{port}/api/v0/test'
            opts = {'vars': {'url': url}}
            q = '''
            $params=((foo, bar), (key, valu))
            $resp = $lib.inet.http.get($url, params=$params)
            return ( ($resp.code, $resp.err) )
            '''
            code, (errname, _) = await core.callStorm(q, opts=opts)
            self.eq(code, -1)
            self.eq('ClientConnectorCertificateError', errname)

        conf = {'tls:ca:dir': tlscadir}
        async with self.getTestCore(dirn=dirn, conf=conf) as core:
            addr, port = await core.addHttpsPort(0)
            core.addHttpApi('/api/v0/test', s_test.HttpReflector, {'cell': core})
            url = f'https://*****:*****@localhost:{port}/api/v0/test'
            opts = {'vars': {'url': url}}
            q = '''
            $params=((foo, bar), (key, valu))
            $resp = $lib.inet.http.get($url, params=$params)
            return ( $resp.json() )
            '''
            resp = await core.callStorm(q, opts=opts)
            data = resp.get('result')
            self.eq(data.get('params'), {'key': ('valu', ), 'foo': ('bar', )})
async def test_axon_tlscapath(self):

    with self.getTestDir() as dirn:
        cdir = s_common.gendir(dirn, 'certs')
        cadir = s_common.gendir(cdir, 'cas')
        tdir = s_certdir.CertDir(cdir)
        tdir.genCaCert('somelocalca')
        tdir.genHostCert('localhost', signas='somelocalca')

        localkeyfp = tdir.getHostKeyPath('localhost')
        localcertfp = tdir.getHostCertPath('localhost')
        shutil.copyfile(localkeyfp, s_common.genpath(dirn, 'sslkey.pem'))
        shutil.copyfile(localcertfp, s_common.genpath(dirn, 'sslcert.pem'))

        tlscadir = s_common.gendir(dirn, 'cadir')
        for fn in os.listdir(cadir):
            if fn.endswith('.crt'):
                shutil.copyfile(os.path.join(cadir, fn), os.path.join(tlscadir, fn))

        conf = {'auth:passwd': 'root'}
        async with self.getTestAxon(dirn=dirn, conf=conf) as axon:
            host, port = await axon.addHttpsPort(0, host='127.0.0.1')
            url = f'https://*****:*****@127.0.0.1:{port}/api/v1/active'
            resp = await axon.wget(url)
            self.false(resp.get('ok'))
            self.isin('unable to get local issuer certificate', resp.get('mesg'))

            retn = await axon.put(abuf)
            self.eq(retn, asdfretn)

            axon.addHttpApi('/api/v1/pushfile', HttpPushFile, {'cell': axon})
            url = f'https://*****:*****@127.0.0.1:{port}/api/v1/pushfile'
            resp = await axon.wput(asdfhash, url)
            self.false(resp.get('ok'))
            self.isin('unable to get local issuer certificate', resp.get('mesg'))

            resp = await axon.postfiles(fields, url)
            self.false(resp.get('ok'))
            self.isin('unable to get local issuer certificate', resp.get('err'))

        conf = {'auth:passwd': 'root', 'tls:ca:dir': tlscadir}
        async with self.getTestAxon(dirn=dirn, conf=conf) as axon:
            host, port = await axon.addHttpsPort(0, host='127.0.0.1')
            url = f'https://*****:*****@localhost:{port}/api/v1/active'
            resp = await axon.wget(url)
            self.true(resp.get('ok'))

            retn = await axon.put(abuf)
            self.eq(retn, asdfretn)

            axon.addHttpApi('/api/v1/pushfile', HttpPushFile, {'cell': axon})
            url = f'https://*****:*****@localhost:{port}/api/v1/pushfile'
            resp = await axon.wput(asdfhash, url)
            self.true(resp.get('ok'))

            resp = await axon.postfiles(fields, url)
            self.true(resp.get('ok'))
async def test_cell_nexuschanges(self):

    with self.getTestDir() as dirn:

        dir0 = s_common.genpath(dirn, 'cell00')
        dir1 = s_common.genpath(dirn, 'cell01')

        async def coro(prox, offs):
            retn = []
            yielded = False
            async for offset, data in prox.getNexusChanges(offs):
                yielded = True
                nexsiden, act, args, kwargs, meta = data
                if nexsiden == 'auth:auth' and act == 'user:add':
                    retn.append(args)
                    break
            return yielded, retn

        conf = {
            'nexslog:en': True,
            'nexslog:async': True,
            'dmon:listen': 'tcp://127.0.0.1:0/',
            'https:port': 0,
        }

        async with await s_cell.Cell.anit(dir0, conf=conf) as cell00, \
                cell00.getLocalProxy() as prox00:

            self.true(cell00.nexsroot.map_async)
            self.true(cell00.nexsroot.donexslog)

            await prox00.addUser('test')
            self.true(await prox00.getNexsIndx() > 0)

            # We should have a set of auth:auth changes to find
            task = cell00.schedCoro(coro(prox00, 0))
            yielded, data = await asyncio.wait_for(task, 6)
            self.true(yielded)
            usernames = [args[1] for args in data]
            self.eq(usernames, ['test'])

        # Disable change logging for this cell.
        conf = {'nexslog:en': False}
        async with await s_cell.Cell.anit(dir1, conf=conf) as cell01, \
                cell01.getLocalProxy() as prox01:

            self.false(cell01.nexsroot.donexslog)

            await prox01.addUser('test')

            task = cell01.schedCoro(coro(prox01, 0))
            yielded, data = await asyncio.wait_for(task, 6)
            self.false(yielded)
            self.eq(data, [])
def backup(srcdir, dstdir, compact=True):
    '''
    Args:
        compact (bool): whether to optimize storage while copying to the destination
    '''
    tick = s_common.now()

    srcdir = s_common.reqdir(srcdir)
    dstdir = s_common.gendir(dstdir)

    logger.info(f'Starting backup of [{srcdir}]')
    logger.info(f'Destination dir: [{dstdir}]')

    for root, dnames, fnames in os.walk(srcdir, topdown=True):

        relpath = os.path.relpath(root, start=srcdir)

        for name in list(dnames):

            # Explicitly skip directory names of 'tmp' to avoid backing up temporary files
            if name == 'tmp':
                dnames.remove(name)
                continue

            srcpath = s_common.genpath(root, name)
            dstpath = s_common.genpath(dstdir, relpath, name)

            if name.endswith('.lmdb'):
                dnames.remove(name)
                backup_lmdb(srcpath, dstpath)
                continue

            logger.info(f'making dir:{dstpath}')
            s_common.gendir(dstpath)

        for name in fnames:

            srcpath = s_common.genpath(root, name)

            # skip unix sockets etc...
            if not os.path.isfile(srcpath):
                continue

            dstpath = s_common.genpath(dstdir, relpath, name)

            logger.info(f'copying: {srcpath} -> {dstpath}')
            shutil.copy(srcpath, dstpath)

    tock = s_common.now()

    logger.info(f'Backup complete. Took [{tock-tick:.2f}] for [{srcdir}]')
    return
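# Hypothetical usage sketch for backup() above (not part of the original source):
# walks the source service directory, skips 'tmp' directories, hands *.lmdb
# directories to backup_lmdb(), and copies everything else with shutil.copy().
# The source and destination paths below are placeholders.
backup('/path/to/cortex00', '/backups/cortex00', compact=True)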
async def test_csvtool_export(self):

    async with self.getTestCore() as core:

        await core.nodes('[ test:int=20 :loc=us ]')
        await core.nodes('[ test:int=30 :loc=cn ]')
        await core.nodes('[ test:int=40 ]')

        url = core.getLocalUrl()

        dirn = s_common.gendir(core.dirn, 'junk')
        csvpath = s_common.genpath(dirn, 'csvtest.csv')
        stormpath = s_common.genpath(dirn, 'csvtest.storm')

        with s_common.genfile(stormpath) as fd:
            fd.write(csvstorm_export)

        # test a few no-no cases
        argv = ['--test', '--export', stormpath, csvpath]
        outp = self.getTestOutp()
        await s_csvtool.main(argv, outp=outp)
        outp.expect('--export requires --cortex')

        argv = ['--cortex', url, '--export', stormpath, csvpath, 'lol.csv']
        outp = self.getTestOutp()
        await s_csvtool.main(argv, outp=outp)
        outp.expect('--export requires exactly 1 csvfile')

        argv = ['--cortex', url, '--export', stormpath, csvpath]
        outp = self.getTestOutp()
        await s_csvtool.main(argv, outp=outp)
        outp.expect('Counted 3 nodes.')
        outp.expect('3 csv rows')

        with open(csvpath, 'r') as fd:
            rows = [row for row in csv.reader(fd)]

        self.eq(rows, (['20', 'us'], ['30', 'cn'], ['40', '']))

        with mock.patch('synapse.telepath.Proxy._getSynVers', self._getOldSynVers):
            outp = self.getTestOutp()
            await s_csvtool.main(argv, outp=outp)
            outp.expect(f'Cortex version 0.0.0 is outside of the csvtool supported range')
async def test_synsplice_remote(self):

    async with self.getTestCore() as core:

        await self.addCreatorDeleterRoles(core)

        host, port = await core.dmon.listen('tcp://127.0.0.1:0/')
        curl = f'tcp://*****:*****@{host}:{port}/'

        mesg = ('node:add', {'ndef': ('test:str', 'foo')})
        splicefp = s_common.genpath(core.dirn, 'splice.mpk')
        with s_common.genfile(splicefp) as fd:
            fd.write(s_msgpack.en(mesg))

        argv = [
            '--cortex', curl,
            '--format', 'syn.splice',
            '--modules', 'synapse.tests.utils.TestModule',
            splicefp,
        ]

        outp = self.getTestOutp()
        self.eq(await s_feed.main(argv, outp=outp), 0)

        nodes = await core.eval('test:str=foo').list()
        self.len(1, nodes)
def getSlabsInDir(clas, dirn):
    '''
    Returns all open slabs under a directory
    '''
    toppath = s_common.genpath(dirn)
    return [slab for slab in clas.allslabs.values()
            if toppath == slab.path
            or slab.path.startswith(toppath + os.sep)]
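# Hypothetical usage sketch (not part of the original source), assuming
# getSlabsInDir() above is exposed as a classmethod on the Slab class in
# synapse.lib.lmdbslab. The directory path below is a placeholder.
import synapse.lib.lmdbslab as s_lmdbslab

for slab in s_lmdbslab.Slab.getSlabsInDir('/path/to/cell/dir'):
    print(slab.path)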
async def test_backup(self):

    async with self.getTestCore() as core:

        layriden = core.getLayer().iden

        # For additional complication, open a spooled set that shouldn't be backed up
        async with await s_spooled.Set.anit(dirn=core.dirn, size=2) as sset:
            await sset.add(10)
            await sset.add(20)
            await sset.add(30)

            await core.fini()  # Avoid having the same DB open twice

            with self.getTestDir() as dirn2:

                argv = (core.dirn, dirn2)

                self.eq(0, s_backup.main(argv))

                fpset = self.compare_dirs(core.dirn, dirn2, skipfns={'lock.mdb'}, skipdirs={'tmp'})

                self.false(os.path.exists(s_common.genpath(dirn2, 'tmp')))

                # We expect the data.mdb file to be in the fpset
                self.isin(f'/layers/{layriden}/layer_v2.lmdb/data.mdb', fpset)
def parsePath(*paths):
    '''
    function to parse the incoming path.
    lists of paths are joined prior to parsing
    '''
    if None in paths:
        return None

    path = s_common.genpath(*paths)

    path_parts = getPathParts(path)

    base = None
    oldbases = []

    try:
        cls = _pathClass(path_parts[0])
        base = cls(path_parts, 0, parent=None)

        nbase = base.next()
        while nbase:
            base = nbase
            nbase = base.next()
            if nbase:
                oldbases.append(base)

    except s_common.NoSuchPath as e:
        return None
    finally:
        [b.close() for b in oldbases]

    return base
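# Hypothetical usage sketch for parsePath() above (not part of the original
# source): path segments are joined with genpath() before parsing, None is
# returned for unparseable paths, and the returned base object is assumed to
# expose close() like the intermediate bases do. The path is a placeholder.
base = parsePath('/tmp', 'foo.zip', 'bar.txt')
if base is not None:
    base.close()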
async def test_syningest_remote(self):

    async with self.getTestCore() as core:

        guid = s_common.guid()
        seen = s_common.now()
        gestdef = self.getIngestDef(guid, seen)

        with self.getTestDir() as dirn:

            # Test yaml support here
            gestfp = s_common.genpath(dirn, 'gest.yaml')
            s_common.yamlsave(gestdef, gestfp)

            argv = [
                '--cortex', core.getLocalUrl(),
                '--debug',
                '--modules', 'synapse.tests.utils.TestModule',
                gestfp,
            ]

            outp = self.getTestOutp()
            cmdg = s_t_utils.CmdGenerator(['storm test:pivcomp -> *', EOFError()])
            with self.withCliPromptMockExtendOutp(outp):
                with self.withTestCmdr(cmdg):
                    self.eq(await s_feed.main(argv, outp=outp), 0)

            self.true(outp.expect('test:str=haha', throw=False))
            self.true(outp.expect('test:pivtarg=hehe', throw=False))
async def test_synnodes_offset(self):

    async with self.getTestCore() as core:

        await self.addCreatorDeleterRoles(core)

        host, port = await core.dmon.listen('tcp://127.0.0.1:0/')
        curl = f'tcp://*****:*****@{host}:{port}/'

        with self.getTestDir() as dirn:

            mpkfp = s_common.genpath(dirn, 'podes.mpk')
            with s_common.genfile(mpkfp) as fd:
                for i in range(20):
                    pode = (('test:int', i), {})
                    fd.write(s_msgpack.en(pode))

            argv = [
                '--cortex', curl,
                '--format', 'syn.nodes',
                '--modules', 'synapse.tests.utils.TestModule',
                '--chunksize', '4',
                '--offset', '15',
                mpkfp,
            ]

            outp = self.getTestOutp()
            self.eq(await s_feed.main(argv, outp=outp), 0)

            # Sad path catch
            outp = self.getTestOutp()
            argv.append(mpkfp)
            self.eq(await s_feed.main(argv, outp=outp), 1)
            self.true(outp.expect('Cannot start from a arbitrary offset for more than 1 file.'))

        nodes = await core.eval('test:int').list()
        self.len(8, nodes)
def main(dirn, conf=None):
    '''
    Initialize and execute the main loop for a Cell.

    Args:
        dirn (str): Directory backing the Cell data.
        conf (dict): Configuration dictionary.

    Notes:
        This ends up calling ``main()`` on the Cell, and does not return
        anything. It calls sys.exit() at the end of its processing.
    '''
    try:

        # Configure logging since we may have come in via
        # multiprocessing.Process as part of a Daemon config.
        s_common.setlogging(logger, os.getenv('SYN_TEST_LOG_LEVEL', 'WARNING'))

        dirn = s_common.genpath(dirn)
        ctor, func = getCellCtor(dirn, conf=conf)

        cell = func(dirn, conf)

        port = cell.getCellPort()
        logger.warning('cell divided: %s (%s) port: %d' % (ctor, dirn, port))

        cell.main()
        sys.exit(0)
    except Exception as e:
        logger.exception('main: %s (%s)' % (dirn, e))
        sys.exit(1)
async def test_synnodes_remote(self):

    async with self.getTestCore() as core:

        await self.addCreatorDeleterRoles(core)

        host, port = await core.dmon.listen('tcp://127.0.0.1:0/')
        curl = f'tcp://*****:*****@{host}:{port}/'

        with self.getTestDir() as dirn:

            jsonlfp = s_common.genpath(dirn, 'podes.jsonl')
            with s_common.genfile(jsonlfp) as fd:
                for i in range(20):
                    pode = (('test:int', i), {})
                    _ = fd.write(json.dumps(pode).encode() + b'\n')

            argv = [
                '--cortex', curl,
                '--format', 'syn.nodes',
                '--modules', 'synapse.tests.utils.TestModule',
                '--chunksize', '3',
                jsonlfp,
            ]

            outp = self.getTestOutp()
            self.eq(await s_feed.main(argv, outp=outp), 0)

        nodes = await core.eval('test:int').list()
        self.len(20, nodes)
async def init(self, name, conf=None):
    '''
    Generate a new CryoTank with a given name or get a reference to an existing CryoTank.

    Args:
        name (str): Name of the CryoTank.

    Returns:
        CryoTank: A CryoTank instance.
    '''
    tank = self.tanks.get(name)
    if tank is not None:
        return tank

    iden = s_common.guid()

    logger.info('Creating new tank: %s', name)

    path = s_common.genpath(self.dirn, 'tanks', iden)

    tank = await CryoTank.anit(path, conf)

    node = await self.names.open((name, ))
    await node.set((iden, conf))

    self.tanks.put(name, tank)

    return tank
async def init(self, name, conf=None):
    '''
    Generate a new CryoTank with a given name or get a reference to an existing CryoTank.

    Args:
        name (str): Name of the CryoTank.

    Returns:
        CryoTank: A CryoTank instance.
    '''
    tank = self.tanks.get(name)
    if tank is not None:
        return tank

    iden = s_common.guid()

    logger.info('Creating new tank: %s', name)

    path = s_common.genpath(self.dirn, 'tanks', iden)

    tank = await CryoTank.anit(path, conf)

    node = await self.names.open((name,))
    await node.set((iden, conf))

    self.tanks.put(name, tank)

    return tank
async def main(argv, outp=s_output.stdout):

    pars = argparse.ArgumentParser()
    pars.add_argument('--push', metavar='<url>', help='A telepath URL of a Cortex or PkgRepo.')
    pars.add_argument('--save', metavar='<path>', help='Save the completed package JSON to a file.')
    pars.add_argument('--optic', metavar='<path>', help='Load Optic module files from a directory.')
    pars.add_argument('pkgfile', metavar='<pkgfile>', help='Path to a storm package prototype yml file.')

    opts = pars.parse_args(argv)

    pkgdef = loadPkgProto(opts.pkgfile, opticdir=opts.optic)

    if opts.save:
        s_common.jssave(pkgdef, opts.save)

    if opts.push:

        path = s_common.genpath('~/.syn/telepath.yaml')
        fini = await s_telepath.loadTeleEnv(path)

        async with await s_telepath.openurl(opts.push) as core:
            await core.addStormPkg(pkgdef)

        if fini is not None:  # pragma: no cover
            await fini()

    return 0
async def test_lib_aha_loadenv(self):

    with self.getTestDir() as dirn:

        async with self.getTestAha() as aha:
            host, port = await aha.dmon.listen('tcp://127.0.0.1:0')
            await aha.auth.rootuser.setPasswd('hehehaha')

            conf = {
                'version': 1,
                'aha:servers': [
                    f'tcp://*****:*****@127.0.0.1:{port}/',
                ],
            }

            path = s_common.genpath(dirn, 'telepath.yaml')
            s_common.yamlsave(conf, path)

            fini = await s_telepath.loadTeleEnv(path)

            # Should be one uninitialized aha client
            self.len(1, s_telepath.aha_clients)
            [info] = s_telepath.aha_clients.values()
            self.none(info.get('client'))

            with self.raises(s_exc.NoSuchName):
                await s_telepath.openurl('aha://[email protected]')

            # Connecting to an aha url should have initialized the client
            self.len(1, s_telepath.aha_clients)
            self.nn(info.get('client'))

            await fini()
def test_msgpack_loadfile(self):
    t0 = ('5678', {'key': 1})
    t1 = ('1234', {'key': 'haha'})

    with self.getTestDir() as fdir:
        fd = s_common.genfile(fdir, 'oneobj.mpk')
        fd.write(s_msgpack.en(t0))
        fd.close()

        fd = s_common.genfile(fdir, 'twoobjs.mpk')
        for obj in (t0, t1):
            fd.write(s_msgpack.en(obj))
        fd.close()

        data = s_msgpack.loadfile(s_common.genpath(fdir, 'oneobj.mpk'))
        self.eq(data, ('5678', {'key': 1}))

        # Files containing multiple objects are not supported
        self.raises(msgpack.exceptions.ExtraData, s_msgpack.loadfile,
                    s_common.genpath(fdir, 'twoobjs.mpk'))
def getCaCerts(self):
    '''
    Return a list of CA certs from the CertDir.

    Returns:
        [OpenSSL.crypto.X509]: List of CA certificates.
    '''
    retn = []

    path = s_common.genpath(self.certdir, 'cas')
    for name in os.listdir(path):

        if not name.endswith('.crt'):
            continue

        full = s_common.genpath(self.certdir, 'cas', name)
        retn.append(self._loadCertPath(full))

    return retn
async def test_csvtool_local(self):

    with self.getTestDir() as dirn:

        logpath = s_common.genpath(dirn, 'csvtest.log')

        csvpath = s_common.genpath(dirn, 'csvtest.csv')
        with s_common.genfile(csvpath) as fd:
            fd.write(csvfile)

        stormpath = s_common.genpath(dirn, 'csvtest.storm')
        with s_common.genfile(stormpath) as fd:
            fd.write(csvstorm)

        argv = ['--csv-header', '--debug', '--test', '--logfile', logpath, stormpath, csvpath]

        outp = self.getTestOutp()
        await s_csvtool.main(argv, outp=outp)
        outp.expect('2 nodes (9 created)')
async def test_longpath(self):
    # This is similar to the DaemonTest::test_unixsock_longpath
    # but exercises the long-path failure inside of the cell's daemon
    # instead.
    with self.getTestDir() as dirn:
        extrapath = 108 * 'A'
        longdirn = s_common.genpath(dirn, extrapath)
        with self.getAsyncLoggerStream('synapse.lib.cell',
                                       'LOCAL UNIX SOCKET WILL BE UNAVAILABLE') as stream:
            async with await s_cell.Cell.anit(longdirn) as cell:
                self.none(cell.dmon.addr)
                self.true(await stream.wait(1))
def backup(srcdir, dstdir):

    tick = s_common.now()

    srcdir = s_common.reqdir(srcdir)
    dstdir = s_common.gendir(dstdir)

    logger.info(f'Starting backup of [{srcdir}]')
    logger.info(f'Destination dir: [{dstdir}]')

    for root, dnames, fnames in os.walk(srcdir, topdown=True):

        relpath = os.path.relpath(root, start=srcdir)

        for name in list(dnames):

            srcpath = s_common.genpath(root, name)
            dstpath = s_common.genpath(dstdir, relpath, name)

            if name.endswith('.lmdb'):
                dnames.remove(name)
                backup_lmdb(srcpath, dstpath)
                continue

            logger.info(f'making dir:{dstpath}')
            s_common.gendir(dstpath)

        for name in fnames:

            srcpath = s_common.genpath(root, name)

            # skip unix sockets etc...
            if not os.path.isfile(srcpath):
                continue

            dstpath = s_common.genpath(dstdir, relpath, name)

            logger.info(f'copying: {srcpath} -> {dstpath}')
            shutil.copy(srcpath, dstpath)

    tock = s_common.now()

    logger.info(f'Backup complete. Took [{tock-tick:.2f}] for [{srcdir}]')
    return
def test_certdir_importfile(self):
    with self.getCertDir() as cdir:  # type: s_certdir.CertDir
        with self.getTestDir() as testpath:

            # File doesn't exist
            fpath = s_common.genpath(testpath, 'not_real.crt')
            self.raises(s_exc.NoSuchFile, cdir.importFile, fpath, 'cas')

            # File has unsupported extension
            fpath = s_common.genpath(testpath, 'coolpic.bmp')
            with s_common.genfile(fpath) as fd:
                self.raises(s_exc.BadFileExt, cdir.importFile, fpath, 'cas')

            tests = (
                ('cas', 'coolca.crt'),
                ('cas', 'coolca.key'),
                ('hosts', 'coolhost.crt'),
                ('hosts', 'coolhost.key'),
                ('users', 'cooluser.crt'),
                ('users', 'cooluser.key'),
                ('users', 'cooluser.p12'),
            )
            for ftype, fname in tests:
                srcpath = s_common.genpath(testpath, fname)
                dstpath = s_common.genpath(cdir.path, ftype, fname)

                with s_common.genfile(srcpath) as fd:
                    fd.write(b'arbitrary data')
                    fd.seek(0)

                # Make sure the file is not there
                self.raises(s_exc.NoSuchFile, s_common.reqfile, dstpath)

                # Import it and make sure it exists
                self.none(cdir.importFile(srcpath, ftype))
                with s_common.reqfile(dstpath) as dstfd:
                    self.eq(dstfd.read(), b'arbitrary data')

                # Make sure it can't be overwritten
                self.raises(s_exc.FileExists, cdir.importFile, srcpath, ftype)
def test_easycert_importfile(self):
    with self.getTestDir() as tstpath:

        outp = self.getTestOutp()
        fname = 'coolfile.crt'
        srcpath = s_common.genpath(tstpath, fname)
        ftype = 'cas'
        argv = ['--importfile', ftype, '--certdir', tstpath, srcpath]
        with s_common.genfile(srcpath) as fd:
            self.eq(s_easycert.main(argv, outp=outp), 0)

        outp = self.getTestOutp()
        fname = '*****@*****.**'
        srcpath = s_common.genpath(tstpath, fname)
        ftype = 'cas'
        argv = ['--importfile', ftype, '--certdir', tstpath, srcpath]
        with s_common.genfile(srcpath) as fd:
            self.eq(s_easycert.main(argv, outp=outp), 0)

        outp = self.getTestOutp()
        argv = ['--importfile', 'cas', '--certdir', tstpath, 'nope']
        self.raises(s_exc.NoSuchFile, s_easycert.main, argv, outp=outp)
def getModPath(self, *paths):
    '''
    Construct a path relative to this module's working directory.

    Args:
        *paths: A list of path strings

    Notes:
        This creates the module specific directory if it does not exist.

    Returns:
        (str): The full path (or None if no cortex dir is configured).
    '''
    dirn = self.getModDir()
    return s_common.genpath(dirn, *paths)
async def test_csvtool(self):

    async with self.getTestCore() as core:

        url = core.getLocalUrl()

        dirn = s_common.gendir(core.dirn, 'junk')

        logpath = s_common.genpath(dirn, 'csvtest.log')

        csvpath = s_common.genpath(dirn, 'csvtest.csv')
        with s_common.genfile(csvpath) as fd:
            fd.write(csvfile)

        stormpath = s_common.genpath(dirn, 'csvtest.storm')
        with s_common.genfile(stormpath) as fd:
            fd.write(csvstorm)

        argv = ['--csv-header', '--debug', '--cortex', url, '--logfile', logpath, stormpath, csvpath]

        outp = self.getTestOutp()
        await s_csvtool.main(argv, outp=outp)
        outp.expect('2 nodes (9 created)')
async def test_csvtool_cli(self):

    with self.getTestDir() as dirn:

        logpath = s_common.genpath(dirn, 'csvtest.log')

        csvpath = s_common.genpath(dirn, 'csvtest.csv')
        with s_common.genfile(csvpath) as fd:
            fd.write(csvfile)

        stormpath = s_common.genpath(dirn, 'csvtest.storm')
        with s_common.genfile(stormpath) as fd:
            fd.write(csvstorm)

        argv = ['--csv-header', '--debug', '--cli', '--test', '--logfile', logpath, stormpath, csvpath]

        outp = self.getTestOutp()
        cmdg = s_t_utils.CmdGenerator(['storm --hide-props inet:fqdn', EOFError()])

        with self.withTestCmdr(cmdg):
            await s_csvtool.main(argv, outp=outp)

        outp.expect('inet:fqdn=google.com')
        outp.expect('2 nodes (9 created)')
def getRegrDir(self, *path):

    regr = os.getenv('SYN_REGRESSION_REPO')
    if regr is None:  # pragma: no cover
        raise unittest.SkipTest('SYN_REGRESSION_REPO is not set')

    regr = s_common.genpath(regr)

    if not os.path.isdir(regr):  # pragma: no cover
        raise Exception('SYN_REGRESSION_REPO is not a dir')

    dirn = os.path.join(regr, *path)

    with self.getTestDir(copyfrom=dirn) as regrdir:
        yield regrdir
def test_msgpack_iterfile(self):
    t0 = ('5678', {'key': 1})
    t1 = ('1234', {'key': 'haha'})

    with self.getTestDir() as fdir:
        fd = s_common.genfile(fdir, 'test.mpk')
        for obj in (t0, t1):
            fd.write(s_msgpack.en(obj))
        fd.close()

        gen = s_msgpack.iterfile(s_common.genpath(fdir, 'test.mpk'))

        items = [obj for obj in gen]
        self.len(2, items)
        self.sorteq(items, [t0, t1])

        fd.close()
def getUserKeyPath(self, name):
    '''
    Gets the path to a user key.

    Args:
        name (str): The name of the user keypair.

    Examples:
        Get the path to the user key for "myuser":

            mypath = cdir.getUserKeyPath('myuser')

    Returns:
        str: The path if exists.
    '''
    path = s_common.genpath(self.certdir, 'users', '%s.key' % name)
    if not os.path.isfile(path):
        return None
    return path
def getClientCertPath(self, name):
    '''
    Gets the path to a client certificate.

    Args:
        name (str): The name of the client keypair.

    Examples:
        Get the path to the client certificate for "myuser":

            mypath = cdir.getClientCertPath('myuser')

    Returns:
        str: The path if exists.
    '''
    path = s_common.genpath(self.certdir, 'users', '%s.p12' % name)
    if not os.path.isfile(path):
        return None
    return path
def getCaKeyPath(self, name):
    '''
    Gets the path to a CA key.

    Args:
        name (str): The name of the CA keypair.

    Examples:
        Get the path to the private key for the CA "myca":

            mypath = cdir.getCaKeyPath('myca')

    Returns:
        str: The path if exists.
    '''
    path = s_common.genpath(self.certdir, 'cas', '%s.key' % name)
    if not os.path.isfile(path):
        return None
    return path
def getHostKeyPath(self, name):
    '''
    Gets the path to a host key.

    Args:
        name (str): The name of the host keypair.

    Examples:
        Get the path to the host key for the host "myhost":

            mypath = cdir.getHostKeyPath('myhost')

    Returns:
        str: The path if exists.
    '''
    path = s_common.genpath(self.certdir, 'hosts', '%s.key' % name)
    if not os.path.isfile(path):
        return None
    return path
def importFile(self, path, mode, outp=None):
    '''
    Imports certs and keys into the Synapse cert directory

    Args:
        path (str): The path of the file to be imported.
        mode (str): The certdir subdirectory to import the file into.

    Examples:
        Import CA certificate 'mycoolca.crt' to the 'cas' directory.

            certdir.importFile('mycoolca.crt', 'cas')

    Notes:
        importFile does not perform any validation on the files it imports.

    Returns:
        None
    '''
    if not os.path.isfile(path):
        raise s_exc.NoSuchFile('File does not exist')

    fname = os.path.split(path)[1]
    parts = fname.rsplit('.', 1)
    ext = parts[1] if len(parts) == 2 else None

    if not ext or ext not in ('crt', 'key', 'p12'):
        mesg = 'importFile only supports .crt, .key, .p12 extensions'
        raise s_exc.BadFileExt(mesg=mesg, ext=ext)

    newpath = s_common.genpath(self.certdir, mode, fname)
    if os.path.isfile(newpath):
        raise s_exc.FileExists('File already exists')

    shutil.copy(path, newpath)
    if outp is not None:
        outp.printf('copied %s to %s' % (path, newpath))
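# Hypothetical usage sketch for importFile() above (not part of the original
# source): copies an existing CA cert into the 'cas' subdirectory of a CertDir
# and reports progress through an output object. Paths are placeholders.
import synapse.lib.certdir as s_certdir
import synapse.lib.output as s_output

cdir = s_certdir.CertDir('/path/to/certdir')
cdir.importFile('/path/to/mycoolca.crt', 'cas', outp=s_output.stdout)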
async def __anit__(self, dirn):

    await s_cell.Cell.__anit__(self, dirn)

    self.dmon.share('cryotank', self)

    path = s_common.gendir(self.dirn, 'cryo.lmdb')

    self.names = await self.hive.open(('cryo', 'names'))

    self.tanks = await s_base.BaseRef.anit()
    self.onfini(self.tanks.fini)

    for name, node in self.names:

        iden, conf = node.valu

        logger.info('Bringing tank [%s][%s] online', name, iden)

        path = s_common.genpath(self.dirn, 'tanks', iden)

        tank = await CryoTank.anit(path, conf)
        self.tanks.put(name, tank)
async def __anit__(self, dirn, conf=None, readonly=False):

    await s_base.Base.__anit__(self)

    s_telepath.Aware.__init__(self)

    self.dirn = s_common.gendir(dirn)

    self.auth = None

    # each cell has a guid
    path = s_common.genpath(dirn, 'cell.guid')

    # generate a guid file if needed
    if not os.path.isfile(path):
        with open(path, 'w') as fd:
            fd.write(s_common.guid())

    # read our guid file
    with open(path, 'r') as fd:
        self.iden = fd.read().strip()

    boot = self._loadCellYaml('boot.yaml')
    self.boot = s_common.config(boot, bootdefs)

    await self._initCellDmon()

    if conf is None:
        conf = {}

    [conf.setdefault(k, v) for (k, v) in self._loadCellYaml('cell.yaml').items()]

    self.conf = s_common.config(conf, self.confdefs + self.confbase)

    self.cmds = {}
    self.insecure = self.boot.get('insecure', False)

    self.sessions = {}
    self.httpsonly = self.conf.get('https:only', False)

    self.boss = await s_boss.Boss.anit()
    self.onfini(self.boss)

    await self._initCellSlab(readonly=readonly)

    self.hive = await self._initCellHive()
    self.auth = await self._initCellAuth()

    # check and migrate old cell auth
    oldauth = s_common.genpath(self.dirn, 'auth')
    if os.path.isdir(oldauth):
        await s_compat.cellAuthToHive(oldauth, self.auth)
        os.rename(oldauth, oldauth + '.old')

    admin = self.boot.get('auth:admin')
    if admin is not None:

        name, passwd = admin.split(':', 1)

        user = self.auth.getUserByName(name)
        if user is None:
            user = await self.auth.addUser(name)

        await user.setAdmin(True)
        await user.setPasswd(passwd)
        self.insecure = False

    await self._initCellHttp()

    async def fini():
        [await s.fini() for s in self.sessions.values()]

    self.onfini(fini)
def _loadCasIntoSSLContext(self, ctx):

    path = s_common.genpath(self.certdir, 'cas')
    for name in os.listdir(path):
        if name.endswith('.crt'):
            ctx.load_verify_locations(os.path.join(path, name))
def _getPathJoin(self, *paths):
    return s_common.genpath(self.certdir, *paths)