async def test_load_failures(self):
    '''
    Exercise core module load failure paths: a failure raised before module
    init (PRE) and one raised during init (INIT), first against a live
    Cortex via loadCoreModule(), then during Cortex startup from cell.yaml.
    '''
    async with self.getTestCore() as core:  # type: s_cortex.Cortex
        # Ctor/pre-init failure: loadCoreModule returns None and logs.
        with self.setTstEnvars(SYN_TEST_MOD_FAIL_PRE=1) as cm:
            with self.getAsyncLoggerStream('synapse.cortex', 'preCoreModuleFail') as stream:
                self.none(await core.loadCoreModule(foo_ctor))
                self.true(await stream.wait(1))
            self.none(core.getCoreMod(foo_ctor))
        # Init-time failure: same outcome, different log message.
        with self.setTstEnvars(SYN_TEST_MOD_FAIL_INIT=1) as cm:
            with self.getAsyncLoggerStream('synapse.cortex', 'initCoreModuleFail') as stream:
                self.none(await core.loadCoreModule(foo_ctor))
                self.true(await stream.wait(1))
            self.none(core.getCoreMod(foo_ctor))
    # Now bake the failing module into cell.yaml and confirm the Cortex
    # still boots, logging the failure and skipping the module.
    with self.getTestDir(mirror='testcore') as dirn:
        conf = s_common.yamlload(dirn, 'cell.yaml')
        conf['modules'].append(foo_ctor)
        s_common.yamlsave(conf, dirn, 'cell.yaml')
        conf = s_common.yamlload(dirn, 'cell.yaml')
        with self.setTstEnvars(SYN_TEST_MOD_FAIL_PRE=1) as cm:
            with self.getAsyncLoggerStream('synapse.cortex', 'preCoreModuleFail') as stream:
                async with await s_cortex.Cortex.anit(dirn) as core:
                    self.true(await stream.wait(1))
                    self.none(core.getCoreMod(foo_ctor))
        with self.setTstEnvars(SYN_TEST_MOD_FAIL_INIT=1) as cm:
            with self.getAsyncLoggerStream('synapse.cortex', 'initCoreModuleFail') as stream:
                async with await s_cortex.Cortex.anit(dirn) as core:
                    self.true(await stream.wait(1))
                    self.none(core.getCoreMod(foo_ctor))
def test_deploy_auth(self):
    '''
    Verify the deploy tool writes boot.yaml with auth enabled (via --auth
    and via --admin) and fails when the cell directory already exists.
    '''
    with self.getTestDir() as dirn:
        outp = self.getTestOutp()
        argv = ['cortex', 'core', dirn, '--auth']
        ret = s_deploy.main(argv, outp)
        self.eq(ret, 0)
        d = s_common.yamlload(dirn, 'cells', 'core', 'boot.yaml')
        self.eq(d, {'auth:en': True,
                    'type': 'cortex',
                    'cell:name': 'core'})
        # Sad path
        outp = self.getTestOutp()
        argv = ['cortex', 'core', dirn, '--auth']
        ret = s_deploy.main(argv, outp)
        self.eq(ret, 1)
        outp.expect('cell directory already exists')
    with self.getTestDir() as dirn:
        outp = self.getTestOutp()
        # --admin implies auth:en and records the admin passwd string.
        argv = ['cortex', 'core', dirn, '--admin', 'pennywise:clownshoes']
        ret = s_deploy.main(argv, outp)
        self.eq(ret, 0)
        d = s_common.yamlload(dirn, 'cells', 'core', 'boot.yaml')
        self.eq(d, {'auth:en': True,
                    'type': 'cortex',
                    'cell:name': 'core',
                    'auth:admin': 'pennywise:clownshoes'})
def test_common_yaml(self):
    '''
    Round-trip save/load/mod helpers for YAML, and confirm yamlload uses a
    safe loader that rejects python object tags.
    '''
    obj = [{'key': 1,
            'key2': [1, 2, 3],
            'key3': True,
            'key4': 'some str',
            'key5': {'oh': 'my', 'we all': 'float down here'},
            },
           'duck', False, 'zero', 0.1,
           ]
    with self.getTestDir() as dirn:
        s_common.yamlsave(obj, dirn, 'test.yaml')
        robj = s_common.yamlload(dirn, 'test.yaml')
        self.eq(obj, robj)
        obj = {'foo': 'bar', 'zap': [3, 4, 'f']}
        s_common.yamlsave(obj, dirn, 'test.yaml')
        # yamlmod merges keys into the existing file in place.
        s_common.yamlmod({'bar': 42}, dirn, 'test.yaml')
        robj = s_common.yamlload(dirn, 'test.yaml')
        obj['bar'] = 42
        self.eq(obj, robj)
        # Test yaml helper safety
        s = '!!python/object/apply:os.system ["pwd"]'
        with s_common.genfile(dirn, 'explode.yaml') as fd:
            fd.write(s.encode())
        self.raises(yaml.YAMLError, s_common.yamlload, dirn, 'explode.yaml')
async def test_tools_genpkg(self):
    '''
    Build a storm package from a YAML prototype, push it to a test Cortex,
    and verify the saved package definition contents.
    '''
    ymlpath = s_common.genpath(dirname, 'files', 'stormpkg', 'testpkg.yml')
    async with self.getTestCore() as core:
        savepath = s_common.genpath(core.dirn, 'testpkg.json')
        url = core.getLocalUrl()
        argv = ('--push', url, '--save', savepath, ymlpath)
        await s_genpkg.main(argv)
        # The pushed package makes its command and module available.
        await core.callStorm('testcmd')
        await core.callStorm('$lib.import(testmod)')
        pdef = s_common.yamlload(savepath)
        self.eq(pdef['name'], 'testpkg')
        self.eq(pdef['version'], (0, 0, 1))
        self.eq(pdef['modules'][0]['name'], 'testmod')
        self.eq(pdef['modules'][0]['storm'], 'inet:ipv4\n')
        self.eq(pdef['commands'][0]['name'], 'testcmd')
        self.eq(pdef['commands'][0]['storm'], 'inet:ipv6\n')
        # Optic files are embedded base64-encoded ('hi\n' -> 'aGkK').
        self.eq(pdef['optic']['files']['index.html']['file'], 'aGkK')
def loadPkgProto(path, opticdir=None):
    '''
    Load a storm package prototype from a YAML file, inlining module and
    command storm source from sibling storm/ directories and optional
    optic/ assets.

    Args:
        path (str): Path to the package YAML definition.
        opticdir (str): Optional override for the optic assets directory.
            Defaults to <protodir>/optic when present.

    Returns:
        dict: The completed package definition.
    '''
    full = s_common.genpath(path)
    pkgdef = s_common.yamlload(full)
    # Allow version as a semver string; normalize to a tuple.
    if isinstance(pkgdef['version'], str):
        pkgdef['version'] = chopSemVer(pkgdef['version'])
    protodir = os.path.dirname(full)
    # Inline each module/command body from its storm source file.
    for mod in pkgdef.get('modules', ()):
        name = mod.get('name')
        with s_common.genfile(protodir, 'storm', 'modules', name) as fd:
            mod['storm'] = fd.read().decode()
    for cmd in pkgdef.get('commands', ()):
        name = cmd.get('name')
        with s_common.genfile(protodir, 'storm', 'commands', name) as fd:
            cmd['storm'] = fd.read().decode()
    if opticdir is None:
        opticdir = s_common.genpath(protodir, 'optic')
    if os.path.isdir(opticdir):
        pkgdef.setdefault('optic', {})
        pkgdef['optic'].setdefault('files', {})
        loadOpticFiles(pkgdef, opticdir)
    return pkgdef
async def main(argv, outp=s_output.stdout):
    '''
    CLI entry point: load a previously saved hive tree (msgpack or YAML)
    into a remote hive over telepath.
    '''
    pars = argparse.ArgumentParser(
        prog='synapse.tools.hive.load',
        description='Load data into a remote hive from a previous hivesave.')
    pars.add_argument('--trim', default=False, action='store_true',
                      help='Trim all other hive nodes (DANGER!)')
    pars.add_argument('--path', default=None,
                      help='A hive path string to use as the root.')
    pars.add_argument(
        '--yaml', default=False, action='store_true',
        help='Parse the savefile as a YAML file (default: msgpack)')
    pars.add_argument('hiveurl', help='The telepath URL for the remote hive.')
    pars.add_argument('filepath', help='The local file path to load.')
    opts = pars.parse_args(argv)
    # Savefile format defaults to msgpack; --yaml switches the parser.
    if opts.yaml:
        tree = s_common.yamlload(opts.filepath)
    else:
        tree = s_msgpack.loadfile(opts.filepath)
    path = ()
    if opts.path is not None:
        path = opts.path.split('/')
    async with await s_telepath.openurl(opts.hiveurl) as hive:
        await hive.loadHiveTree(tree, path=path, trim=opts.trim)
async def loadTeleEnv(path):
    '''
    Load a telepath environment file (aha servers and cert dirs) and apply
    it to the process.

    Args:
        path (str): Path to a telepath.yaml style file.

    Returns:
        coroutine function or None: An async fini() callback that undoes the
        registrations, or None when the file is missing or has an unknown
        version.
    '''
    if not os.path.isfile(path):
        return
    conf = s_common.yamlload(path)
    vers = conf.get('version')
    # Only version 1 is understood; anything else is ignored with a warning.
    if vers != 1:
        logger.warning(f'telepath.yaml unknown version: {vers}')
        return
    ahas = conf.get('aha:servers', ())
    cdirs = conf.get('certdirs', ())
    for a in ahas:
        await addAhaUrl(a)
    for p in cdirs:
        s_certdir.addCertPath(p)
    async def fini():
        # Unregister everything we added, in the same order.
        for a in ahas:
            await delAhaUrl(a)
        for p in cdirs:
            s_certdir.delCertPath(p)
    return fini
def _loadCellYaml(self, *path):
    '''
    Load a YAML file relative to the cell directory.

    Returns:
        dict: The parsed contents, or an empty dict when the file is absent.
    '''
    full = os.path.join(self.dirn, *path)
    if not os.path.isfile(full):
        return {}
    logger.debug('Loading file from [%s]', full)
    return s_common.yamlload(full)
async def initFromDirn(dirn, *args, **kwargs):
    '''
    As above, but retrieves type from boot.yaml in dirn

    Args:
        dirn (str): The cell directory containing boot.yaml.

    Returns:
        The initialized cell from init().

    Raises:
        s_exc.BadConfValu: If boot.yaml is missing the ``type`` key.
    '''
    conf = s_common.yamlload(dirn, 'boot.yaml') or {}
    kind = conf.get('type')
    # Bug fix: this previously tested the *builtin* ``type`` (which is never
    # None) instead of the local ``kind``, so a missing 'type' key silently
    # fell through to init(None, ...) rather than raising BadConfValu.
    if kind is None:
        raise s_exc.BadConfValu('boot.yaml missing type key')
    return await init(kind, dirn, *args, **kwargs)
async def __anit__(self, path, **kwargs):
    '''
    Initialize the LMDB slab: resolve map size from kwargs and the persisted
    <path>.opts.yaml sidecar file, open the environment, and start the
    background sync loop.
    '''
    await s_base.Base.__anit__(self)
    kwargs.setdefault('map_size', s_const.gibibyte)
    opts = kwargs
    self.path = pathlib.Path(path)
    self.optspath = self.path.with_suffix('.opts.yaml')
    # Options persisted by a prior run override the caller's kwargs.
    if self.optspath.exists():
        opts.update(s_common.yamlload(self.optspath))
    initial_mapsize = opts.get('map_size')
    if initial_mapsize is None:
        raise s_exc.BadArg('Slab requires map_size')
    mdbpath = self.path / 'data.mdb'
    # Never map smaller than the existing database file.
    if mdbpath.exists():
        mapsize = max(initial_mapsize, os.path.getsize(mdbpath))
    else:
        mapsize = initial_mapsize
    # save the transaction deltas in case of error...
    self.xactops = []
    self.recovering = False
    opts.setdefault('max_dbs', 128)
    opts.setdefault('writemap', True)
    # maxsize/growsize are slab-level options; pop so lmdb.open never sees them.
    self.maxsize = opts.pop('maxsize', None)
    self.growsize = opts.pop('growsize', None)
    self.readonly = opts.get('readonly', False)
    self.mapsize = _mapsizeround(mapsize)
    if self.maxsize is not None:
        self.mapsize = min(self.mapsize, self.maxsize)
    self._saveOptsFile()
    self.lenv = lmdb.open(path, **opts)
    self.scans = set()
    self.dirty = False
    # Readonly slabs never hold a write transaction.
    if self.readonly:
        self.xact = None
        self.txnrefcount = 0
    else:
        self._initCoXact()
    self.onfini(self._onCoFini)
    self.schedCoro(self._runSyncLoop())
async def main(argv, outp=s_output.stdout):
    '''
    CLI entry point: load a previously saved hive tree (msgpack or YAML)
    into a remote hive over telepath, with remote version checking and
    support for both HiveApi and cell endpoints.
    '''
    pars = argparse.ArgumentParser(
        prog='synapse.tools.hive.load',
        description='Load data into a remote hive from a previous hivesave.')
    pars.add_argument('--trim', default=False, action='store_true',
                      help='Trim all other hive nodes (DANGER!)')
    pars.add_argument('--path', default=None,
                      help='A hive path string to use as the root.')
    pars.add_argument(
        '--yaml', default=False, action='store_true',
        help='Parse the savefile as a YAML file (default: msgpack)')
    pars.add_argument('hiveurl', help='The telepath URL for the remote hive.')
    pars.add_argument('filepath', help='The local file path to load.')
    opts = pars.parse_args(argv)
    # Savefile format defaults to msgpack; --yaml switches the parser.
    if opts.yaml:
        tree = s_common.yamlload(opts.filepath)
    else:
        tree = s_msgpack.loadfile(opts.filepath)
    path = ()
    if opts.path is not None:
        path = opts.path.split('/')
    async with await s_telepath.openurl(opts.hiveurl) as hive:
        classes = hive.sharinfo.get('classes', ())
        try:
            # Refuse to talk to a remote outside the supported version range.
            s_version.reqVersion(hive._getSynVers(), reqver)
            if 'synapse.lib.hive.HiveApi' in classes:
                # Direct hive share: call loadHiveTree on it.
                await hive.loadHiveTree(tree, path=path, trim=opts.trim)
            else:
                # Cell share: dispatch the call through the cell dyncall API.
                todo = s_common.todo('loadHiveTree', tree, path=path, trim=opts.trim)
                await hive.dyncall('cell', todo)
        except s_exc.BadVersion as e:
            valu = s_version.fmtVersion(*e.get('valu'))
            outp.printf(
                f'Hive version {valu} is outside of the hive.load supported range ({reqver}).'
            )
            outp.printf(
                f'Please use a version of Synapse which supports {valu}; current version is {s_version.verstring}.'
            )
            return 1
async def test_tools_hivesave(self):
    '''
    Verify hivesave against both a shared hive and a cell hive, saving as
    msgpack and as YAML, with and without a --path root.
    '''
    with self.getTestDir() as dirn:
        hivepath0 = os.path.join(dirn, 'hivesave0.mpk')
        yamlpath0 = os.path.join(dirn, 'hivesave0.yaml')
        async with self.getTestHiveDmon() as dmon:
            hive = dmon.shared.get('hive')
            await hive.set(('baz', 'faz'), 'visi')
            hurl = self.getTestUrl(dmon, 'hive')
            # Default save: whole tree as msgpack.
            await s_hivesave.main([hurl, hivepath0])
            tree = s_msgpack.loadfile(hivepath0)
            self.eq('visi', tree['kids']['baz']['kids']['faz']['value'])
            # Rooted save as YAML.
            await s_hivesave.main(
                ['--path', 'baz', '--yaml', hurl, yamlpath0])
            tree = s_common.yamlload(yamlpath0)
            self.eq('visi', tree['kids']['faz']['value'])
        hivepath1 = os.path.join(dirn, 'hivesave1.mpk')
        yamlpath1 = os.path.join(dirn, 'hivesave1.yaml')
        path = os.path.join(dirn, 'cell')
        # Same checks against a cell's hive over its local telepath URL.
        async with await s_cell.Cell.anit(path) as cell:
            await cell.hive.set(('hehe', 'haha'), 20)
            curl = cell.getLocalUrl()
            await s_hivesave.main([curl, hivepath1])
            tree = s_msgpack.loadfile(hivepath1)
            self.eq(20, tree['kids']['hehe']['kids']['haha']['value'])
            await s_hivesave.main(
                ['--path', 'hehe', '--yaml', curl, yamlpath1])
            tree = s_common.yamlload(yamlpath1)
            self.eq(20, tree['kids']['haha']['value'])
def __init__(self, core): self.core = core # type: synapse.cortex.Cortex self.model = core.model # type: synapse.datamodel.Model # Avoid getModPath / getConfPath during __init__ since these APIs # will create directories. We do not need that behavior by default. self._modpath = os.path.join(self.core.dirn, 'mods', self.getModName()) self._confpath = os.path.join(self._modpath, 'conf.yaml') conf = {} if os.path.isfile(self._confpath): conf = s_common.yamlload(self._confpath) self.conf = s_common.config(conf, self.confdefs)
def test_common_yaml(self):
    '''
    Round-trip save/load/mod helpers for YAML, and confirm yamlload uses a
    safe loader that rejects python object tags.
    '''
    obj = [
        {
            'key': 1,
            'key2': [1, 2, 3],
            'key3': True,
            'key4': 'some str',
            'key5': {'oh': 'my', 'we all': 'float down here'},
        },
        'duck', False, 'zero', 0.1,
    ]
    with self.getTestDir() as dirn:
        s_common.yamlsave(obj, dirn, 'test.yaml')
        robj = s_common.yamlload(dirn, 'test.yaml')
        self.eq(obj, robj)
        obj = {'foo': 'bar', 'zap': [3, 4, 'f']}
        s_common.yamlsave(obj, dirn, 'test.yaml')
        # yamlmod merges keys into the existing file in place.
        s_common.yamlmod({'bar': 42}, dirn, 'test.yaml')
        robj = s_common.yamlload(dirn, 'test.yaml')
        obj['bar'] = 42
        self.eq(obj, robj)
        # Test yaml helper safety
        s = '!!python/object/apply:os.system ["pwd"]'
        with s_common.genfile(dirn, 'explode.yaml') as fd:
            fd.write(s.encode())
        self.raises(yaml.YAMLError, s_common.yamlload, dirn, 'explode.yaml')
async def _makeDefaultLayer(self):
    '''
    Since a user hasn't specified any layers, make one

    Deploys an lmdb layer cell under <dirn>/layers and applies the
    configured mapsize (if any) to its cell.yaml before starting it.
    '''
    import synapse.cells as s_cells
    layerdir = s_common.gendir(self.dirn, 'layers', DEFAULT_LAYER_NAME)
    s_cells.deploy('layer-lmdb', layerdir)
    mapsize = self.conf.get('layer:lmdb:mapsize')
    if mapsize is not None:
        # Merge the mapsize into the deployed cell config.
        cell_yaml = pathlib.Path(layerdir, 'cell.yaml')
        conf = s_common.yamlload(cell_yaml) or {}
        conf['lmdb:mapsize'] = mapsize
        s_common.yamlsave(conf, cell_yaml)
    logger.info('Creating a new default storage layer at %s', layerdir)
    return await s_cells.initFromDirn(layerdir)
async def __anit__(self, path, **kwargs):
    '''
    Initialize the LMDB slab: merge kwargs with the persisted
    <path>/opts.yaml options, open the environment, and start the
    background sync loop.
    '''
    await s_base.Base.__anit__(self)
    kwargs.setdefault('map_size', s_const.gibibyte)
    opts = kwargs
    self.path = path
    self.optspath = os.path.join(path, 'opts.yaml')
    # Options persisted by a prior run override the caller's kwargs.
    if os.path.isfile(self.optspath):
        opts.update(s_common.yamlload(self.optspath))
    self.mapsize = opts.get('map_size')
    if self.mapsize is None:
        # NOTE(review): a more specific exception (e.g. BadArg) would match
        # the newer slab implementations - confirm before changing.
        raise Exception('Slab requires map_size!')
    # save the transaction deltas in case of error...
    self.xactops = []
    self.recovering = False
    opts.setdefault('max_dbs', 128)
    opts.setdefault('writemap', True)
    # if growsize is not set, we double...
    # maxsize/growsize are slab-level options; pop so lmdb.open never sees them.
    self.maxsize = opts.pop('maxsize', None)
    self.growsize = opts.pop('growsize', None)
    self.readonly = opts.get('readonly', False)
    self.lenv = lmdb.open(path, **opts)
    self.scans = set()
    self.holders = 0
    self.dirty = False
    # Readonly slabs never hold a write transaction.
    if self.readonly:
        self.xact = None
        self.txnrefcount = 0
    else:
        self._initCoXact()
    self.onfini(self._onCoFini)
    self.schedCoro(self._runSyncLoop())
def __init__(self, core, conf=None):
    '''
    Initialize the core module with an optional inline configuration.

    Args:
        core: The cortex this module belongs to.
        conf (dict): Optional configuration values.
    '''
    self.core = core
    self.model = core.model

    if self.mod_name is None:
        self.mod_name = self.getModName()

    # Avoid getModPath / getConfPath during __init__ since these APIs
    # will create directories. We do not need that behavior by default.
    self._modpath = os.path.join(self.core.dirn, 'mods', self.getModName())
    self._confpath = os.path.join(self._modpath, 'conf.yaml')

    if conf is None:
        conf = {}

    # NOTE(review): an on-disk conf.yaml fully replaces any conf passed by
    # the caller (no merge) - confirm this precedence is intentional.
    if os.path.isfile(self._confpath):
        conf = s_common.yamlload(self._confpath)

    # Validate/normalize the conf against the module's confdefs.
    self.conf = s_common.config(conf, self.confdefs)
def getItems(*paths):
    '''
    Load feed items from a set of files by extension.

    Supports .json and .yaml/.yml (decoded, normalized to a list) and
    .mpk (lazy msgpack iterator). Unsupported paths are logged and skipped.

    Returns:
        list: A list of (path, items) tuples.
    '''
    def aslist(valu):
        # Normalize a single decoded object to a one-item list.
        return valu if isinstance(valu, list) else [valu]

    items = []
    for path in paths:
        if path.endswith('.json'):
            items.append((path, aslist(s_common.jsload(path))))
        elif path.endswith(('.yaml', '.yml')):
            items.append((path, aslist(s_common.yamlload(path))))
        elif path.endswith('.mpk'):
            items.append((path, s_msgpack.iterfile(path)))
        else:  # pragma: no cover
            logger.warning('Unsupported file path: [%s]', path)
    return items
async def test_migr_assvr_defaults(self):
    '''
    Test that migration service is being properly initialized from default cmdline args.
    '''
    with self.getRegrDir('cortexes', REGR_VER) as src:
        # sneak in test for missing splice slab - no impact to migration
        for root, dirs, _ in os.walk(src, topdown=True):
            for dname in dirs:
                if dname == 'splices.lmdb':
                    shutil.rmtree(os.path.join(root, dname))

        # check defaults
        with self.getTestDir() as dest, self.withSetLoggingMock():
            argv = [
                '--src', src,
                '--dest', dest,
            ]

            async with await s_migr.main(argv) as migr:
                self.true(migr.isfini)
                self.eq(migr.src, src)
                self.eq(migr.dest, dest)
                self.sorteq(migr.migrops, s_migr.ALL_MIGROPS)
                self.eq(migr.addmode, 'nexus')
                self.eq(migr.editbatchsize, 100)
                self.eq(migr.fairiter, 100)
                self.none(migr.nodelim)
                self.false(migr.safetyoff)
                self.false(migr.fromlast)
                self.false(migr.srcdedicated)
                self.false(migr.destdedicated)

            # check the saved offset file
            offsyaml = s_common.yamlload(dest, 'migration', 'lyroffs.yaml')
            self.true(all(v['nextoffs'] == 0 for v in offsyaml.values()))

            # startup 0.2.0 core - migrated data is usable and writable.
            async with await s_cortex.Cortex.anit(dest, conf=None) as core:
                nodes = await core.nodes('inet:ipv4=1.2.3.4')
                self.len(1, nodes)
                nodes = await core.nodes('[inet:ipv4=9.9.9.9]')
                self.len(1, nodes)
async def _initCellHive(self):
    '''
    Initialize the cell hive: open a remote hive if configured, otherwise
    a slab-backed hive, seeding a brand new hive from hiveboot.yaml.

    Returns:
        The hive instance.
    '''
    hurl = self.conf.get('hive')
    # A configured hive URL takes precedence over the local slab hive.
    if hurl is not None:
        return await s_hive.openurl(hurl)

    # Record newness *before* initdb creates the db.
    isnew = not self.slab.dbexists('hive')

    db = self.slab.initdb('hive')

    hive = await s_hive.SlabHive.anit(self.slab, db=db)
    self.onfini(hive)

    # Only seed from hiveboot.yaml on first creation.
    if isnew:
        path = os.path.join(self.dirn, 'hiveboot.yaml')
        if os.path.isfile(path):
            tree = s_common.yamlload(path)
            if tree is not None:
                await hive.loadHiveTree(tree)

    return hive
def __init__(self, core, conf=None): self.core = core # type: synapse.cortex.Cortex self.model = core.model # type: synapse.datamodel.Model if self.mod_name is None: self.mod_name = self.getModName() # Avoid getModPath / getConfPath during __init__ since these APIs # will create directories. We do not need that behavior by default. self._modpath = os.path.join(self.core.dirn, 'mods', self.getModName()) self._confpath = os.path.join(self._modpath, 'conf.yaml') if conf is None: conf = {} if os.path.isfile(self._confpath): conf = s_common.yamlload(self._confpath) self.conf = s_common.config(conf, self.confdefs)
async def startSyncFromFile(self):
    '''
    Start sync from layer offsets provided in offsfile generated by migration tool, e.g.
        <lyriden>
            created: <epochms>
            nextoffs: <int>

    Returns:
        (list): Of (<lyriden>, <offset>) tuples
    '''
    lyroffs = s_common.yamlload(self.offsfile)

    retn = []
    for lyriden, info in lyroffs.items():
        # Clear any prior sync errors before (re)starting this layer.
        await self._resetLyrErrs(lyriden)
        nextoffs = info['nextoffs']
        logger.info(f'Starting Layer sync for {lyriden} from file offset {nextoffs}')
        await self._startLyrSync(lyriden, nextoffs)
        retn.append((lyriden, nextoffs))

    return retn
async def _initCellHive(self):
    '''
    Initialize the slab-backed cell hive, seeding a brand new hive from
    hiveboot.yaml if present.

    Returns:
        The SlabHive instance.
    '''
    # Record newness *before* initdb creates the db.
    isnew = not self.slab.dbexists('hive')

    db = self.slab.initdb('hive')

    hive = await s_hive.SlabHive.anit(self.slab, db=db, nexsroot=self.nexsroot)
    self.onfini(hive)

    # Only seed from hiveboot.yaml on first creation.
    if isnew:
        path = os.path.join(self.dirn, 'hiveboot.yaml')
        if os.path.isfile(path):
            logger.debug(f'Loading cell hive from {path}')
            tree = s_common.yamlload(path)
            if tree is not None:
                # Pack and unpack the tree to avoid tuple/list issues
                # for in-memory structures.
                tree = s_msgpack.un(s_msgpack.en(tree))
                await hive.loadHiveTree(tree)

    return hive
def alias(name):
    '''
    Resolve a telepath alias via ~/.syn/aliases.yaml

    Args:
        name (str): Name of the alias to resolve.

    Notes:
        An exact match against the aliases will always be returned first.
        If no exact match is found and the name contains a '/' in it, the
        value before the slash is looked up and the remainder of the path
        is joined to any result. This is done to support dynamic Telepath
        share names.

    Returns:
        str: The url string, if present in the alias.  None will be returned
        if there are no matches.
    '''
    path = s_common.getSynPath('aliases.yaml')
    if not os.path.isfile(path):
        return None

    # Robustness fix: yamlload() returns None for an empty aliases.yaml,
    # which previously raised AttributeError on conf.get() below.
    conf = s_common.yamlload(path) or {}

    # Is there an exact match - if so, return it.
    url = conf.get(name)
    if url:
        return url

    # Since telepath supports dynamic shared object access,
    # slice a name at the first '/', look up using that value
    # and then append the second value to it.
    dynname = None
    if '/' in name:
        name, dynname = name.split('/', 1)
        url = conf.get(name)

    if url and dynname:
        url = '/'.join([url, dynname])

    return url
def test_buildpkg(self):
    '''
    Run the buildpkg tool against the test package and verify the expected
    doc artifacts are produced in docs/_build.
    '''
    if vt_buildpkg.s_common is None:
        self.skipTest(
            'Synapse library is unavailable to test buildpkg tool with.')
    self.true(os.path.isfile(testpkgfp))
    argv = [
        testpkgfp,
    ]
    r = asyncio.run(vt_buildpkg.main(argv))
    self.eq(r, 0)

    # Import here since the module-level import may be unavailable.
    import synapse.common as s_common
    pkgdef = s_common.yamlload(testpkgfp)
    # Each doc entry should produce the original file plus an .rst twin.
    efiles = set()
    for dnfo in pkgdef.get('docs'):
        bname = os.path.basename(dnfo.get('path'))
        efiles.add(bname)
        efiles.add(bname.rsplit('.', 1)[0] + '.rst')
    builddir = os.path.join(testpkgdir, 'docs', '_build')
    self.eq(efiles, set(os.listdir(builddir)))
def test_deploy_dmonyaml(self):
    '''
    Verify the deploy tool writes dmon.yaml from --listen/--module args and
    refuses to overwrite an existing dmon.yaml.
    '''
    with self.getTestDir() as dirn:
        outp = self.getTestOutp()
        argv = [
            '--listen', 'tcp://1.2.3.4:8080/',
            '--module', 'synapse.tests.utils',
            'cortex', 'core', dirn
        ]
        ret = s_deploy.main(argv, outp)
        self.eq(ret, 0)
        d = s_common.yamlload(dirn, 'dmon.yaml')
        self.eq(d, {'modules': ['synapse.tests.utils'],
                    'listen': 'tcp://1.2.3.4:8080/'})
        outp.expect('Loaded synapse.tests.utils@')
        # Sad path
        outp = self.getTestOutp()
        argv = ['--listen', 'tcp://1.2.3.4:8081/', 'cortex', 'core2', dirn]
        ret = s_deploy.main(argv, outp)
        self.eq(ret, 1)
        outp.expect('Cannot overwrite existing dmon.yaml file')
def getItems(*paths):
    '''
    Load feed items from a set of files by extension.

    Supports .json and .yaml/.yml (decoded, normalized to a list),
    .jsonl (one record per line) and .mpk/.nodes (lazy msgpack iterator).
    Unsupported paths are logged and skipped.

    Returns:
        list: A list of (path, items) tuples.
    '''
    def aslist(valu):
        # Normalize a single decoded object to a one-item list.
        return valu if isinstance(valu, list) else [valu]

    items = []
    for path in paths:
        if path.endswith('.json'):
            items.append((path, aslist(s_common.jsload(path))))
        elif path.endswith('.jsonl'):
            with s_common.genfile(path) as fd:
                items.append((path, list(s_encoding.iterdata(fd, False, format='jsonl'))))
        elif path.endswith(('.yaml', '.yml')):
            items.append((path, aslist(s_common.yamlload(path))))
        elif path.endswith('.mpk') or path.endswith('.nodes'):
            items.append((path, s_msgpack.iterfile(path)))
        else:  # pragma: no cover
            logger.warning('Unsupported file path: [%s]', path)
    return items
def loadOpticWorkflows(pkgdef, path):
    '''
    Load optic workflow YAML files from path into the package definition's
    optic.workflows dict, keyed by the name extracted from the filename.

    Files whose names do not match the workflow filename pattern are
    skipped with a warning.
    '''
    wdefs = pkgdef['optic']['workflows']
    for root, dirs, files in os.walk(path):
        for name in files:
            match = wflownamere.match(name)
            if match is None:
                logger.warning(
                    'Skipping workflow "%s" that does not match pattern "%s"'
                    % (name, wflownamere.pattern))
                continue
            # The first capture group is the workflow's key name.
            wname = match.groups()[0]
            fullname = s_common.genpath(root, name)
            if not os.path.isfile(fullname):  # pragma: no cover
                continue
            wdefs[wname] = s_common.yamlload(fullname)
def getDocData(fp, root=None):
    '''
    Args:
        fn (str): Name of the file to retrieve the data of.
        root (str): Optional root path to look for a docdata directory in.

    Notes:
        Will detect json/jsonl/yaml/mpk extensions and automatically
        decode that data if found; otherwise it returns bytes.

        Defaults to looking for the ``docdata`` directory in the current
        working directory. This behavior works fine for notebooks nested
        in the docs directory of synapse; but this root directory that
        is looked for may be overridden by providing an alternative root.

    Returns:
        data: May be deserialized data or bytes.

    Raises:
        ValueError if the file does not exist or directory traversal attempted..
    '''
    fpath = getDocPath(fp, root)
    # Decode by extension; anything unrecognized is returned as raw bytes.
    if fpath.endswith('.yaml'):
        return s_common.yamlload(fpath)
    if fpath.endswith('.json'):
        return s_common.jsload(fpath)
    with s_common.genfile(fpath) as fd:
        if fpath.endswith('.mpk'):
            return s_msgpack.un(fd.read())
        if fpath.endswith('.jsonl'):
            # One JSON record per line.
            recs = []
            for line in fd.readlines():
                recs.append(json.loads(line.decode()))
            return recs
        return fd.read()
def getStemCell(dirn):
    '''
    Resolve the cell ctor for a stem cell directory.

    The ctor name comes from the SYN_STEM_CELL_CTOR environment variable,
    which may be overridden by the cell:ctor key in <dirn>/cell.yaml.

    Args:
        dirn (str): The cell directory.

    Returns:
        The resolved ctor callable.

    Raises:
        s_exc.NoSuchDir: If dirn does not exist.
        s_exc.NoSuchCtor: If the ctor name cannot be resolved.
        s_exc.NoSuchFile: If no cell.yaml exists and the env var is unset.
    '''
    if not os.path.isdir(dirn):
        mesg = f'Directory {dirn} does not exist!'
        raise s_exc.NoSuchDir(mesg=mesg)

    # Environment variable provides the default; cell.yaml may override it.
    ctorname = os.getenv('SYN_STEM_CELL_CTOR')

    cellyaml = os.path.join(dirn, 'cell.yaml')

    if os.path.isfile(cellyaml):
        conf = s_common.yamlload(cellyaml)
        ctorname = conf.get('cell:ctor', ctorname)

    if ctorname is not None:
        ctorname = ctorname.strip()
        ctor = s_dyndeps.getDynLocal(ctorname)
        if ctor is None:
            raise s_exc.NoSuchCtor(mesg=f'Unable to resolve ctor [{ctorname}]',
                                   ctor=ctorname)
        return ctor

    # Fixed typo in user-facing error message: "environmt" -> "environment".
    mesg = f'No such file: {cellyaml} and SYN_STEM_CELL_CTOR environment variable is not set.'
    raise s_exc.NoSuchFile(mesg=mesg, path=cellyaml)
async def runCmdOpts(self, opts):
    '''
    Execute a storm query (inline text or from a file) against the cortex,
    streaming and rendering the resulting messages, with optional debug
    output and node-saving to a .jsonl style file.
    '''
    text = opts.get('query')
    filename = opts.get('file')

    # Exactly one of query text / --file may be provided.
    if bool(text) == bool(filename):
        self.printf('Cannot use a storm file and manual query together.')
        self.printf(self.__doc__)
        return

    if filename is not None:
        try:
            with open(filename, 'r') as fd:
                text = fd.read()
        except FileNotFoundError:
            self.printf('file not found: %s' % (filename,))
            return

    stormopts = {}
    optsfile = opts.get('optsfile')
    if optsfile is not None:
        if not os.path.isfile(optsfile):
            self.printf('optsfile not found: %s' % (optsfile,))
            return
        # NOTE(review): yamlload may return None for an empty optsfile,
        # which would break the setdefault calls below - confirm.
        stormopts = s_common.yamlload(optsfile)

    # CLI local setting provides the default for hide-unknown.
    hide_unknown = opts.get('hide-unknown',
                            self._cmd_cli.locs.get('storm:hide-unknown'))
    core = self.getCmdItem()
    stormopts.setdefault('repr', True)
    stormopts.setdefault('path', opts.get('path', False))

    showtext = opts.get('show')
    if showtext is not None:
        stormopts['show'] = showtext.split(',')

    editformat = opts['editformat']
    if editformat != 'nodeedits':
        stormopts['editformat'] = editformat

    nodesfd = None
    if opts.get('save-nodes'):
        # Truncate any existing save file before streaming nodes into it.
        nodesfd = s_common.genfile(opts.get('save-nodes'))
        nodesfd.truncate(0)

    try:
        async for mesg in core.storm(text, opts=stormopts):

            # Let CLI listeners observe every raw message.
            await self._cmd_cli.fire('storm:mesg', mesg=mesg)

            if opts.get('debug'):
                self.printf(pprint.pformat(mesg))
                continue

            if mesg[0] == 'node':
                if nodesfd is not None:
                    byts = json.dumps(mesg[1]).encode()
                    nodesfd.write(byts + b'\n')

            # Dispatch to the per-message-type renderer; unknown types are
            # printed raw unless hide-unknown is set.
            try:
                func = self.cmdmeths[mesg[0]]
            except KeyError:
                if hide_unknown:
                    continue
                self.printf(repr(mesg), color=UNKNOWN_COLOR)
            else:
                func(mesg, opts)

    except s_exc.SynErr as e:
        # A remotely-cancelled query is reported, not raised.
        if e.errinfo.get('errx') == 'CancelledError':
            self.printf('query canceled.')
            return
        raise

    finally:
        if nodesfd is not None:
            nodesfd.close()
def _loadYamlPath(self, path):
    '''
    Load a YAML file from path.

    Returns:
        dict: The parsed contents, or an empty dict when the file is absent.
    '''
    if not os.path.isfile(path):
        return {}
    return s_common.yamlload(path)
async def loadConfYaml(self, *path):
    '''
    Load a YAML config file from the given path parts and apply its
    contents via loadConfDict().
    '''
    return await self.loadConfDict(s_common.yamlload(*path))
async def __anit__(self, path, **kwargs):
    '''
    Initialize the LMDB slab: resolve map size from kwargs and the persisted
    <path>.opts.yaml sidecar, open the environment, optionally lock the
    database into memory, and start the background sync loop.
    '''
    await s_base.Base.__anit__(self)

    kwargs.setdefault('map_size', self.DEFAULT_MAPSIZE)
    kwargs.setdefault('lockmemory', False)
    kwargs.setdefault('map_async', True)

    opts = kwargs

    self.path = pathlib.Path(path)
    self.optspath = self.path.with_suffix('.opts.yaml')

    # Options persisted by a prior run override the caller's kwargs.
    if self.optspath.exists():
        opts.update(s_common.yamlload(self.optspath))

    initial_mapsize = opts.get('map_size')
    if initial_mapsize is None:
        raise s_exc.BadArg('Slab requires map_size')

    mdbpath = self.path / 'data.mdb'
    # Never map smaller than the existing database file.
    if mdbpath.exists():
        mapsize = max(initial_mapsize, os.path.getsize(mdbpath))
    else:
        mapsize = initial_mapsize

    # save the transaction deltas in case of error...
    self.xactops = []
    self.recovering = False

    opts.setdefault('max_dbs', 128)
    opts.setdefault('writemap', True)

    # Slab-level options; pop so lmdb.open never sees them.
    self.maxsize = opts.pop('maxsize', None)
    self.growsize = opts.pop('growsize', self.DEFAULT_GROWSIZE)

    self.readonly = opts.get('readonly', False)
    self.lockmemory = opts.pop('lockmemory', False)
    opts.setdefault('map_async', True)

    self.mapsize = _mapsizeround(mapsize)
    if self.maxsize is not None:
        self.mapsize = min(self.mapsize, self.maxsize)

    self._saveOptsFile()

    self.lenv = lmdb.open(path, **opts)

    self.scans = set()

    self.dirty = False
    # Readonly slabs never hold a write transaction.
    if self.readonly:
        self.xact = None
        self.txnrefcount = 0
    else:
        self._initCoXact()

    self.resizeevent = threading.Event()  # triggered when a resize event occurred
    self.lockdoneevent = asyncio.Event()  # triggered when a memory locking finished

    # LMDB layer uses these for status reporting
    self.locking_memory = False
    self.prefaulting = False
    self.max_could_lock = 0
    self.lock_progress = 0
    self.lock_goal = 0

    if self.lockmemory:
        async def memlockfini():
            # Wake the lock loop so the executor task can exit cleanly.
            self.resizeevent.set()
            await self.memlocktask
        self.memlocktask = s_coro.executor(self._memorylockloop)
        self.onfini(memlockfini)

    self.dbnames = {None: (None, False)}  # prepopulate the default DB for speed

    self.onfini(self._onCoFini)
    if not self.readonly:
        self.schedCoro(self._runSyncLoop())