async def main(argv, outp=s_output.stdout):
    '''
    Build a storm package from a prototype yml file and optionally
    save the completed JSON and/or push it to a Cortex or PkgRepo.
    '''
    parser = argparse.ArgumentParser()
    parser.add_argument('--push', metavar='<url>', help='A telepath URL of a Cortex or PkgRepo.')
    parser.add_argument('--save', metavar='<path>', help='Save the completed package JSON to a file.')
    parser.add_argument('--optic', metavar='<path>', help='Load Optic module files from a directory.')
    parser.add_argument('pkgfile', metavar='<pkgfile>', help='Path to a storm package prototype yml file.')

    args = parser.parse_args(argv)

    pkgdef = loadPkgProto(args.pkgfile, opticdir=args.optic)

    if args.save:
        s_common.jssave(pkgdef, args.save)

    if args.push:
        # Load any telepath environment (aha/cert config) before connecting.
        teleyaml = s_common.genpath('~/.syn/telepath.yaml')
        fini = await s_telepath.loadTeleEnv(teleyaml)

        async with await s_telepath.openurl(args.push) as core:
            await core.addStormPkg(pkgdef)

        if fini is not None:  # pragma: no cover
            await fini()

    return 0
def test_doc_data(self):
    '''
    Verify getDocData loads each supported serialization format and
    rejects unknown extensions and path traversal attempts.
    '''
    with self.getTestDir() as dirn:
        s_common.gendir(dirn, 'docdata', 'stuff')
        docdata = s_common.genpath(dirn, 'docdata')
        root = s_common.genpath(dirn, 'synapse', 'userguides')

        d = {'key': 'value'}

        # Write the same payload in several serialization formats.
        s_common.jssave(d, docdata, 'data.json')
        s_common.yamlsave(d, docdata, 'data.yaml')
        s_msgpack.dumpfile(d, os.path.join(docdata, 'data.mpk'))

        with s_common.genfile(docdata, 'stuff', 'data.txt') as fd:
            fd.write('beep'.encode())

        with s_common.genfile(docdata, 'data.jsonl') as fd:
            for _ in range(3):
                fd.write(json.dumps(d).encode() + b'\n')

        # Structured formats all round-trip to the same dict.
        for fname in ('data.json', 'data.yaml', 'data.mpk'):
            self.eq(s_jupyter.getDocData(fname, root), d)

        self.eq(s_jupyter.getDocData('stuff/data.txt', root), b'beep')
        self.eq(s_jupyter.getDocData('data.jsonl', root), [d, d, d])

        # Unknown extensions and directory traversal both raise.
        self.raises(ValueError, s_jupyter.getDocData, 'newp.bin', root)
        self.raises(ValueError, s_jupyter.getDocData, '../../../../../../etc/passwd', root)
def _fireAxonIden(self, iden):
    '''
    Instantiate the Axon for the given iden from its on-disk opts and
    register it with this host, tracking clone status and used space.
    '''
    axondir = s_common.gendir(self.datadir, '%s.axon' % iden)

    opts = self.makeAxonOpts()
    saved = s_common.jsload(axondir, 'axon.opts')
    if saved is not None:
        opts.update(saved)

    # Special case where the axonbus may update - we want to ensure
    # we're passing the latest axonbus to the Axon so it can register
    # itself properly.
    curbus = opts.get('axon:axonbus')
    if curbus is not None:
        newbus = self.getConfOpt('axon:axonbus')
        if curbus != newbus:
            opts['axon:axonbus'] = newbus
            s_common.jssave(opts, axondir, 'axon.opts')

    self.axons[iden] = Axon(axondir, **opts)

    if opts.get('axon:clone'):
        self.cloneaxons.append(iden)

    self.usedspace += opts.get('axon:bytemax')
async def test_storm_file_optfile(self):
    '''
    Exercise the cmdr `storm --file` / `--optsfile` options with both JSON
    and YAML opts files, then the sad paths (conflicting manual query,
    missing opts file, missing storm file).
    '''
    async with self.getTestCoreAndProxy() as (core, prox):
        test_opts = {'vars': {'hehe': 'woot.com'}}
        dirn = s_common.gendir(core.dirn, 'junk')
        optsfile = os.path.join(dirn, 'woot.json')
        optsfile_yaml = os.path.join(dirn, 'woot.yaml')
        stormfile = os.path.join(dirn, 'woot.storm')

        # The storm query depends on the $hehe var supplied by the opts file.
        with s_common.genfile(stormfile) as fd:
            fd.write(b'[ inet:fqdn=$hehe ]')

        # Same opts, serialized as both JSON and YAML.
        s_common.jssave(test_opts, optsfile)
        s_common.yamlsave(test_opts, optsfile_yaml)

        # JSON opts file resolves $hehe and lifts the node.
        outp = self.getTestOutp()
        cmdr = await s_cmdr.getItemCmdr(prox, outp=outp)
        await cmdr.runCmdLine(
            f'storm --optsfile {optsfile} --file {stormfile}')
        self.true(outp.expect('inet:fqdn=woot.com'))

        # YAML opts file behaves identically.
        outp = self.getTestOutp()
        cmdr = await s_cmdr.getItemCmdr(prox, outp=outp)
        await cmdr.runCmdLine(
            f'storm --optsfile {optsfile_yaml} --file {stormfile}')
        self.true(outp.expect('inet:fqdn=woot.com'))

        # Sad path cases
        # A storm file and a manual query may not be combined.
        outp = self.getTestOutp()
        cmdr = await s_cmdr.getItemCmdr(prox, outp=outp)
        await cmdr.runCmdLine(
            f'storm --file {stormfile} --optsfile {optsfile} .created')
        self.true(
            outp.expect(
                'Cannot use a storm file and manual query together.'))
        self.false(outp.expect('inet:fqdn=woot.com', throw=False))

        # Nonexistent opts file is reported without running the query.
        outp = self.getTestOutp()
        cmdr = await s_cmdr.getItemCmdr(prox, outp=outp)
        await cmdr.runCmdLine(f'storm --file {stormfile} --optsfile newp')
        self.true(outp.expect('optsfile not found'))

        # Nonexistent storm file is reported without running the query.
        outp = self.getTestOutp()
        cmdr = await s_cmdr.getItemCmdr(prox, outp=outp)
        await cmdr.runCmdLine(f'storm --file newp --optsfile {optsfile}')
        self.true(outp.expect('file not found'))
async def test_syningest_fail(self):
    '''
    Feeding an ingest def containing an unknown form logs NoSuchForm
    while the feed tool still exits cleanly.
    '''
    with self.getTestDir() as dirn:
        gestdef = {
            'forms': {
                'test:str': ['yes', ],
                'newp': ['haha', ],
            },
        }
        gestfp = s_common.genpath(dirn, 'gest.json')
        s_common.jssave(gestdef, gestfp)

        argv = ['--test', '--modules', 'synapse.tests.utils.TestModule', gestfp]
        outp = self.getTestOutp()

        with self.getLoggerStream('synapse.lib.snap', 'NoSuchForm') as stream:
            self.eq(await s_feed.main(argv, outp=outp), 0)
            self.true(stream.wait(1))
async def test_syningest_local(self):
    '''
    Ingest a local file in --test --debug mode and verify the resulting
    nodes are visible through the post-ingest storm cmdloop.
    '''
    with self.getTestDir() as dirn:
        guid = s_common.guid()
        seen = s_common.now()

        gestfp = s_common.genpath(dirn, 'gest.json')
        s_common.jssave(self.getIngestDef(guid, seen), gestfp)

        argv = ['--test', '--debug', '--modules', 'synapse.tests.utils.TestModule', gestfp]
        outp = self.getTestOutp()
        cmdg = s_t_utils.CmdGenerator(['storm test:pivcomp -> *', EOFError()])

        with self.withCliPromptMockExtendOutp(outp):
            with self.withTestCmdr(cmdg):
                self.eq(await s_feed.main(argv, outp=outp), 0)

        self.true(outp.expect('test:str=haha', throw=False))
        self.true(outp.expect('test:pivtarg=hehe', throw=False))
def test_syningest_local(self):
    '''
    Ingest a local file in --test --debug mode and verify the resulting
    nodes are visible through the post-ingest cli session.
    '''
    with self.getTestDir() as dirn:
        guid = s_common.guid()
        seen = s_common.now()

        gestfp = s_common.genpath(dirn, 'gest.json')
        s_common.jssave(self.getIngestDef(guid, seen), gestfp)

        argv = [
            '--test', '--debug',
            '--modules', 'synapse.tests.utils.TestModule',
            gestfp,
        ]
        outp = self.getTestOutp()
        cmdg = s_t_utils.CmdGenerator(['storm pivcomp -> *'], on_end=EOFError)

        with mock.patch('synapse.lib.cli.get_input', cmdg):
            self.eq(s_feed.main(argv, outp=outp), 0)

        self.true(outp.expect('teststr=haha', throw=False))
        self.true(outp.expect('pivtarg=hehe', throw=False))
def add(self, **opts):
    '''
    Add a new axon to the AxonHost.

    Args:
        **opts: Axon config options which supersede the AxonHost defaults.

    Example:

        # add another axon to the host with defaults
        axfo = axho.add()

    Returns:
        The new Axon's axfo info tuple.

    Raises:
        s_common.NotEnoughFree: If the volume does not have enough free
            space for the new Axon's byte allocation.
    '''
    iden = s_common.guid()
    opts['iden'] = iden # store iden as a specified option

    fullopts = dict(self.opts)
    fullopts.update(opts)

    bytemax = fullopts.get('bytemax')
    if not fullopts.get('clone'):
        # Non-clone axons also reserve room for their sync data.
        bytemax += fullopts.get('syncmax')

    # Fix: drop the unused `total = volinfo.get('total')` local; only the
    # free-space figure participates in the allocation check.
    volinfo = s_thisplat.getVolInfo(self.datadir)
    free = volinfo.get('free')

    if bytemax > free:
        raise s_common.NotEnoughFree(bytemax)

    axondir = s_common.gendir(self.datadir, '%s.axon' % iden)

    # Persist only the caller-specified overrides (plus iden); defaults
    # are re-derived from the host config on restart.
    s_common.jssave(opts, axondir, 'axon.opts')

    # FIXME fork
    axon = Axon(axondir, **fullopts)

    self.axons[iden] = axon

    return axon.axfo
def benchmarkAll(
        confignames: List = None,
        num_procs=1,
        workfactor=1000,
        tmpdir=None,
        jsondir: str = None,
        jsonprefix: str = None,
) -> None:
    '''
    Run the benchmark suite for each named config, printing a report and
    optionally saving per-config JSON result files into jsondir.
    '''
    if jsondir:
        s_common.gendir(jsondir)

    testdata = TestData(workfactor)

    for configname in (confignames or ['simple']):
        tick = s_common.now()
        config = Configs[configname]
        bench = Benchmarker(config, testdata, workfactor)

        print(f'{num_procs}-process benchmarking: {configname}')
        asyncio.run(bench.runSuite(config, num_procs))
        bench.printreport(configname)

        if jsondir:
            data = {
                'time': tick,
                'config': config,
                'configname': configname,
                'workfactor': workfactor,
                'niters': bench.num_iters,
                'results': bench.reportdata(),
            }
            fn = f'{s_time.repr(tick, pack=True)}_{configname}.json'
            if jsonprefix:
                fn = f'{jsonprefix}{fn}'
                data['prefix'] = jsonprefix
            s_common.jssave(data, jsondir, fn)
async def benchmarkAll(confignames: List = None,
                       num_procs=1,
                       workfactor=1000,
                       tmpdir: str = None,
                       jsondir: str = None,
                       jsonprefix: str = None,
                       niters: int = 4,
                       bench=None,
                       do_profiling=False,
                       tag=None,
                       ) -> None:
    '''
    Run the benchmark suite for each named config against a shared test
    cortex, optionally profiling with yappi and saving JSON result files.
    '''
    if jsondir:
        s_common.gendir(jsondir)

    if do_profiling:
        # Wall-clock (rather than CPU) timing so awaited I/O is included.
        yappi.set_clock_type('wall')

    with syntest.getTestDir(startdir=tmpdir) as dirn:
        # The test data cortex is built once and reused for every config.
        async with await TestData.anit(workfactor, dirn) as testdata:
            print('Initial cortex created')

            if not confignames:
                confignames = ['simple']

            for configname in confignames:
                tick = s_common.now()
                config = Configs[configname]
                bencher = Benchmarker(config, testdata, workfactor, num_iters=niters, tmpdir=tmpdir,
                                      bench=bench, tag=tag)
                print(f'{num_procs}-process benchmarking: {configname}')

                # Progress bar sized by total trials across all iterations.
                initProgress(niters * len(bencher._getTrialFuncs()))
                try:
                    await bencher.runSuite(num_procs, do_profiling=do_profiling)
                    endProgress()

                    if do_profiling:
                        stats = yappi.get_func_stats()
                        stats.print_all()
                        perfdir = tmpdir or tempfile.gettempdir()
                        perffn = pathlib.Path(perfdir) / f'{configname}_{datetime.datetime.now().isoformat()}.out'
                        print(f'Callgrind stats output to {str(perffn)}')
                        stats.save(perffn, 'CALLGRIND')
                        # Clear so the next config profiles in isolation.
                        yappi.clear_stats()

                    bencher.printreport(configname)

                    if jsondir:
                        data = {'time': tick,
                                'config': config,
                                'configname': configname,
                                'workfactor': workfactor,
                                'niters': niters,
                                'results': bencher.reportdata()
                                }
                        fn = f'{s_time.repr(tick, pack=True)}_{configname}.json'
                        if jsonprefix:
                            fn = f'{jsonprefix}{fn}'
                            data['prefix'] = jsonprefix
                        s_common.jssave(data, jsondir, fn)
                finally:
                    # Ensure the progress display is torn down even on error.
                    endProgress()
async def main(argv, outp=s_output.stdout):
    '''
    Build (or load, with --no-build) a storm package and save the
    completed JSON and/or push it to a Cortex or PkgRepo.
    '''
    parser = argparse.ArgumentParser()
    parser.add_argument('--push', metavar='<url>', help='A telepath URL of a Cortex or PkgRepo.')
    parser.add_argument('--save', metavar='<path>', help='Save the completed package JSON to a file.')
    parser.add_argument('--optic', metavar='<path>', help='Load Optic module files from a directory.')
    parser.add_argument('--no-build', action='store_true',
                        help='Treat pkgfile argument as an already-built package')
    parser.add_argument('--no-docs', default=False, action='store_true',
                        help='Do not require docs to be present and replace any doc content with empty strings.')
    parser.add_argument('pkgfile', metavar='<pkgfile>',
                        help='Path to a storm package prototype yml file, or a completed package JSON file.')

    args = parser.parse_args(argv)

    if args.no_build:
        # The pkgfile is already-built JSON; --save would be a no-op copy.
        pkgdef = s_common.jsload(args.pkgfile)
        if args.save:
            print(f'File {args.pkgfile} is treated as already built (--no-build); incompatible with --save.',
                  file=sys.stderr)
            return 1
    else:
        pkgdef = loadPkgProto(args.pkgfile, opticdir=args.optic, no_docs=args.no_docs)

    if not args.save and not args.push:
        print('Neither --push nor --save provided. Nothing to do.', file=sys.stderr)
        return 1

    if args.save:
        s_common.jssave(pkgdef, args.save)

    if args.push:
        # Load any telepath environment (aha/cert config) before connecting.
        teleyaml = s_common.genpath('~/.syn/telepath.yaml')
        fini = await s_telepath.loadTeleEnv(teleyaml)

        async with await s_telepath.openurl(args.push) as core:
            await core.addStormPkg(pkgdef)

        if fini is not None:  # pragma: no cover
            await fini()

    return 0
def add(self, **opts):
    '''
    Add a new axon to the AxonHost.

    Args:
        **opts: kwarg values which supersede the defaults of the AxonHost
            when making the Axon.

    Examples:
        Add another Axon to the host with defaults::

            axfo = host.add()

    Returns:
        ((str, dict)): A Axon information tuple containing configuration
            and link data.

    Raises:
        s_common.NotEnoughFree: If the axonhost:maxsize cap or the volume
            free space would be exceeded by the new Axon's allocation.
    '''
    iden = s_common.guid()

    fullopts = self.makeAxonOpts()
    fullopts['axon:iden'] = iden  # store iden as a specified option
    fullopts.update(opts)

    bytemax = fullopts.get('axon:bytemax')
    clone = fullopts.get('axon:clone')

    volinfo = s_thisplat.getVolInfo(self.datadir)

    free = volinfo.get('free')
    # NOTE(review): `total` is unused below — candidate for removal.
    total = volinfo.get('total')

    # Enforce the host-level cap before the volume-level checks.
    maxsize = self.getConfOpt('axonhost:maxsize')
    if maxsize and (self.usedspace + bytemax) > maxsize:
        raise s_common.NotEnoughFree(mesg='Not enough free space on the AxonHost (due to axonhost:maxsize) to '
                                          'create the new Axon.',
                                     bytemax=bytemax, maxsize=maxsize, usedspace=self.usedspace)

    # Account for space already promised to existing Axons.
    if (self.usedspace + bytemax) > free:
        raise s_common.NotEnoughFree(mesg='Not enough free space on the volume when considering the allocations'
                                          ' of existing Axons.',
                                     bytemax=bytemax, free=free, usedspace=self.usedspace)

    if bytemax > free:
        raise s_common.NotEnoughFree(mesg='Not enough free space on the volume to create the new Axon.',
                                     bytemax=bytemax, free=free)

    axondir = s_common.gendir(self.datadir, '%s.axon' % iden)

    # Persist the fully-merged opts so the Axon can be re-fired on restart.
    s_common.jssave(fullopts, axondir, 'axon.opts')

    # FIXME fork
    axon = Axon(axondir, **fullopts)

    self.usedspace = self.usedspace + bytemax

    self.axons[iden] = axon
    if clone:
        self.cloneaxons.append(iden)

    return axon.axfo
async def test_multislabseqn_discover(self):
    '''
    Test all the horrible things that admins can do by deleting/moving slabs
    '''
    # Speed up copying dirs
    slabopts = {'map_size': 100000}

    with self.getTestDir() as dirn:
        origdirn = s_common.gendir(dirn, 'orig')

        # Build a pristine multi-slab seqn: 25 entries rotated every 10.
        async with await s_multislabseqn.MultiSlabSeqn.anit(origdirn, slabopts=slabopts) as msqn:
            for i in range(25):
                if i > 0 and i % 10 == 0:
                    await msqn.rotate()
                await msqn.add(f'foo{i}')

        baddirn = s_common.genpath(dirn, 'bad1')
        shutil.copytree(origdirn, baddirn)

        # Make a slab a non-dir
        slab0dirn = s_common.genpath(baddirn, f'seqn{"0" * 16}.lmdb')
        shutil.rmtree(slab0dirn)
        s_common.jssave('{}', slab0dirn)

        # The non-directory entry is logged and skipped; later entries remain.
        with self.getAsyncLoggerStream('synapse.lib.multislabseqn', 'non-directory') as stream:
            async with await s_multislabseqn.MultiSlabSeqn.anit(baddirn) as msqn:
                await self.agenlen(15, msqn.iter(0))
            await stream.wait(timeout=1)

        # Switcheroo
        baddirn = s_common.genpath(dirn, 'bad2')
        shutil.copytree(origdirn, baddirn)
        slab0dirn = s_common.genpath(baddirn, f'seqn{"0" * 16}.lmdb')
        slab10dirn = s_common.genpath(baddirn, f'seqn{"0" * 14}0a.lmdb')
        tmpdirn = s_common.genpath(baddirn, 'tmp')
        # Swap the first and second slab dirs so names disagree with contents.
        shutil.move(slab10dirn, tmpdirn)
        shutil.move(slab0dirn, slab10dirn)
        shutil.move(tmpdirn, slab0dirn)

        with self.raises(s_exc.BadCoreStore):
            async with await s_multislabseqn.MultiSlabSeqn.anit(baddirn) as msqn:
                pass

        # Delete out from the middle
        baddirn = s_common.genpath(dirn, 'bad3')
        shutil.copytree(origdirn, baddirn)
        slab10dirn = s_common.genpath(baddirn, f'seqn{"0" * 14}0a.lmdb')
        shutil.rmtree(slab10dirn)

        # The gap is logged; only the entries outside it are iterable.
        with self.getAsyncLoggerStream('synapse.lib.multislabseqn', 'gap in indices') as stream:
            async with await s_multislabseqn.MultiSlabSeqn.anit(baddirn) as msqn:
                await self.agenlen(15, msqn.iter(0))
            await stream.wait(timeout=1)

        # Wipe a seqn clean
        baddirn = s_common.genpath(dirn, 'bad4')
        shutil.copytree(origdirn, baddirn)
        slab20dirn = s_common.genpath(baddirn, f'seqn{"0" * 14}14.lmdb')
        async with await s_lmdbslab.Slab.anit(slab20dirn) as slab:
            seqn = slab.getSeqn('nexuslog')
            # Culling everything through 25 empties the last slab.
            await seqn.cull(25)

        async with await s_multislabseqn.MultiSlabSeqn.anit(baddirn) as msqn:
            await self.agenlen(20, msqn.iter(0))

        # Overlapping seqns
        baddirn = s_common.genpath(dirn, 'bad5')
        shutil.copytree(origdirn, baddirn)
        slab10dirn = s_common.genpath(baddirn, f'seqn{"0" * 14}0a.lmdb')
        async with await s_lmdbslab.Slab.anit(slab10dirn) as slab:
            seqn = slab.getSeqn('nexuslog')
            # Index 22 belongs to the next slab - creates an overlap.
            seqn.add('foo', indx=22)

        with self.raises(s_exc.BadCoreStore):
            async with await s_multislabseqn.MultiSlabSeqn.anit(baddirn) as msqn:
                pass

        # Somebody really messing with us
        baddirn = s_common.genpath(dirn, 'bad6')
        shutil.copytree(origdirn, baddirn)
        slab20dirn = s_common.genpath(baddirn, f'seqn{"0" * 14}14.lmdb')
        async with await s_lmdbslab.Slab.anit(slab20dirn) as slab:
            db = slab.initdb('info')
            # A bogus firstindx that contradicts the dir-name index.
            slab.put(b'firstindx', s_common.int64en(99), db=db)

        with self.raises(s_exc.BadCoreStore):
            async with await s_multislabseqn.MultiSlabSeqn.anit(baddirn) as msqn:
                pass
def _saveOptsFile(self):
    '''
    Persist the current map_size/growsize opts to the opts file.
    '''
    state = {
        'map_size': self.mapsize,
        'growsize': self.growsize,
    }
    s_common.jssave(state, self.optspath)