async def docModel(outp, core):
    '''
    Build RST documentation for a Cortex data model.

    Args:
        outp: Output object (kept for interface compatibility).
        core: Cortex (or proxy) used to fetch the model and validate examples.

    Returns:
        (RstHelp, RstHelp): The "Types" document and the "Forms" document.

    Raises:
        s_exc.SynErr: If a form example query errors or fails to make a node.
    '''
    coreinfo = await core.getCoreInfo()
    _, model = coreinfo.get('modeldef')[0]

    # Sort each model component by name so the generated docs are stable.
    ctors = sorted(model.get('ctors'), key=lambda x: x[0])
    types = sorted(model.get('types'), key=lambda x: x[0])
    forms = sorted(model.get('forms'), key=lambda x: x[0])
    univs = sorted(model.get('univs'), key=lambda x: x[0])

    univ_names = {univ[0] for univ in univs}

    # Map each form name to a sorted list of its property definitions.
    props = collections.defaultdict(list)
    for fname, fnfo, fprops in forms:
        props[fname].extend(fprops)
    for plist in props.values():
        plist.sort()

    dochelp = DocHelp(ctors, types, forms, props, univs)

    # Validate examples by actually creating a node from each one.
    for form, example in dochelp.formhelp.items():
        if example is None:
            continue
        if example.startswith('('):
            q = f"[{form}={example}]"
        else:
            q = f"[{form}='{example}']"

        node = False
        async for (mtyp, mnfo) in core.storm(q, {'editformat': 'none'}):
            if mtyp in ('init', 'fini'):
                continue
            if mtyp == 'err':  # pragma: no cover
                raise s_exc.SynErr(mesg='Invalid example', form=form,
                                   example=example, info=mnfo)
            if mtyp == 'node':
                node = True

        if not node:  # pragma: no cover
            raise s_exc.SynErr(mesg='Unable to make a node from example.',
                               form=form, example=example)

    rst = s_autodoc.RstHelp()
    rst.addHead('Synapse Data Model - Types', lvl=0)
    processCtors(rst, dochelp, ctors)
    processTypes(rst, dochelp, types)

    rst2 = s_autodoc.RstHelp()
    rst2.addHead('Synapse Data Model - Forms', lvl=0)
    processFormsProps(rst2, dochelp, forms, univ_names)
    processUnivs(rst2, dochelp, univs)

    return rst, rst2
async def mergeAllowed(self, user=None):
    '''
    Check whether a user can merge a view into its parent.

    Args:
        user: Optional user to check. Admins (globally or on the parent
            write layer) are allowed without per-splice checks.

    Raises:
        s_exc.CantMergeView: If the view has no parent, or has child views.
        s_exc.ReadOnlyLayer: If the parent's write layer is read-only.
        s_exc.SynErr: If a splice type has no registered permission check.
    '''
    fromlayr = self.layers[0]
    if self.parent is None:
        # Fixed message typo: "than has not" -> "that has not".
        raise s_exc.CantMergeView(mesg=f'Cannot merge a view {self.iden} that has not been forked')

    parentlayr = self.parent.layers[0]
    if parentlayr.readonly:
        raise s_exc.ReadOnlyLayer(mesg="May not merge if the parent's write layer is read-only")

    # A view with forks of its own may not be merged.
    for view in self.core.views.values():
        if view.parent == self:
            raise s_exc.CantMergeView(mesg='Cannot merge a view that has children itself')

    # Admins skip the per-splice permission checks entirely.
    if user is None or user.isAdmin() or user.isAdmin(gateiden=parentlayr.iden):
        return

    async with await self.parent.snap(user=user) as snap:
        splicecount = 0
        async for nodeedit in fromlayr.iterLayerNodeEdits():
            async for offs, splice in fromlayr.makeSplices(0, [nodeedit], None):

                check = self.permCheck.get(splice[0])
                if check is None:
                    raise s_exc.SynErr(mesg='Unknown splice type, cannot safely merge',
                                       splicetype=splice[0])

                await check(user, snap, splice[1])

                splicecount += 1

            # Yield the loop periodically so long merges don't starve other tasks.
            if splicecount % 1000 == 0:
                await asyncio.sleep(0)
def __enter__(self):
    '''
    Convenience function to enable using Proxy objects as synchronous context managers.

    Note:
        This must not be used from async code, and it should never be used in core synapse code.

    Raises:
        s_exc.SynErr: If called from the thread running the async event loop.
    '''
    # Calling this from the loop thread would block the loop it depends on.
    if s_threads.iden() == self.tid:
        # Pass the message as mesg= for consistency with other SynErr raises
        # in this file.
        raise s_exc.SynErr(mesg='Use of synchronous context manager in async code')
    self._ctxobj = self.schedCoroSafePend(self.__aenter__())
    return self
def __enter__(self):
    '''
    Convenience function to enable using Proxy objects as synchronous context managers.

    Note:
        This should never be used by synapse core code. This is for sync client code convenience only.

    Raises:
        s_exc.SynErr: If called from the thread running the async event loop.
    '''
    # Calling this from the loop thread would block the loop it depends on.
    if s_threads.iden() == self.tid:
        # Pass the message as mesg= for consistency with other SynErr raises
        # in this file.
        raise s_exc.SynErr(mesg='Use of synchronous context manager in async code')
    self._ctxobj = self.schedCoroSafePend(self.__aenter__())
    return self
def encodeMsg(self, mesg):
    '''
    Encode a message into bytes using the configured log format.

    The format is read from self.locs under 'log:fmt': 'jsonl' emits one
    sorted-key JSON document per line; 'mpk' emits msgpack bytes.

    Raises:
        s_exc.SynErr: If the configured format is not recognized.
    '''
    fmt = self.locs.get('log:fmt')

    if fmt == 'jsonl':
        # One JSON document per line, keys sorted for stable output.
        return (json.dumps(mesg, sort_keys=True) + '\n').encode()

    if fmt == 'mpk':
        return s_msgpack.en(mesg)

    raise s_exc.SynErr(mesg=f'Unknown encoding format: {fmt}')
def result(retn):
    '''
    Unpack an (ok, valu) retn tuple: return valu on success, raise on failure.

    On failure valu is (errname, info); raise the matching s_exc class if one
    exists, otherwise a generic SynErr carrying the error name as errx.
    '''
    isok, payload = retn
    if isok:
        return payload

    errname, errinfo = payload

    exctype = getattr(s_exc, errname, None)
    if exctype is None:
        errinfo['errx'] = errname
        raise s_exc.SynErr(**errinfo)

    raise exctype(**errinfo)
def __init__(self, path=None):
    '''
    Initialize the cert dir with one or more base directories.

    Args:
        path (str|list|tuple|None): A path string, or a list/tuple of path
            strings. Defaults to the module-level defdir when None.

    Raises:
        s_exc.SynErr: If path is not a string or a list/tuple of strings.
    '''
    self.crypto_numbits = 4096
    self.signing_digest = 'sha256'

    if path is None:
        path = defdir

    self.certdirs = []

    # For backward compatibility, accept either a single path string or a
    # sequence of them.
    if isinstance(path, str):
        self.certdirs.append(s_common.gendir(path))
    elif isinstance(path, (tuple, list)):
        # Plain loop instead of a side-effect list comprehension.
        for p in path:
            self.certdirs.append(s_common.gendir(p))
    else:
        mesg = 'Certdir path must be a path string or a list/tuple of path strings.'
        raise s_exc.SynErr(mesg=mesg)

    # Ensure the standard subdirectories exist in every cert dir.
    for cdir in self.certdirs:
        s_common.gendir(cdir, 'cas')
        s_common.gendir(cdir, 'hosts')
        s_common.gendir(cdir, 'users')
def test_common_getexcfo(self):
    '''
    Verify getexcfo() builds a (name, info) tufo for both a builtin
    exception and a SynErr (which adds the syn:err info).
    '''
    basekeys = ('msg', 'file', 'line', 'name', 'src')

    try:
        1 / 0
    except ZeroDivisionError as e:
        excfo = s_common.getexcfo(e)

    self.istufo(excfo)
    self.eq(excfo[0], 'ZeroDivisionError')
    for key in basekeys:
        self.isin(key, excfo[1])
    self.notin('syn:err', excfo[1])

    excfo = s_common.getexcfo(s_exc.SynErr(mesg='hehe', key=1))
    self.eq(excfo[0], 'SynErr')
    for key in basekeys:
        self.isin(key, excfo[1])
    self.isin('syn:err', excfo[1])
    self.eq(excfo[1].get('syn:err'), {'mesg': 'hehe', 'key': 1})
async def doathingauto3(self, eventdict):
    '''
    Event handler that always fails, for exercising error paths in tests.
    '''
    mesg = 'Test error'
    raise s_exc.SynErr(mesg=mesg)
async def setModelVers(self, vers):
    '''
    Stub override that refuses any model version change.
    '''
    mesg = 'setModelVers not allowed!'
    raise s_exc.SynErr(mesg=mesg)
async def onlinkExc(p):
    '''
    Link callback that counts its invocation and then fails.
    '''
    cnts['exc'] = cnts['exc'] + 1
    raise s_exc.SynErr(mesg='ohhai')
async def badgenr():
    '''
    Async generator that yields one value then raises, for error-path tests.

    Yields:
        str: The single value 'foo'.

    Raises:
        s_exc.SynErr: Always, after the first yield.
    '''
    yield 'foo'
    await asyncio.sleep(0)
    # Pass the message as mesg= for consistency with the other SynErr raises
    # in this file, which all use the keyword form.
    raise s_exc.SynErr(mesg='rando')
async def mockaddsvc(self, name, info, network=None):
    '''
    Wrapper around the real addsvc that can be toggled to fail via self.testerr.
    '''
    if getattr(self, 'testerr', False):
        raise s_exc.SynErr(mesg='newp')
    retn = await realaddsvc(self, name, info, network=network)
    return retn
async def badSetStormSvcEvents(iden, evts):
    '''
    Test stub that records the iden and then fails.

    Raises:
        s_exc.SynErr: Always.
    '''
    badiden.append(iden)
    # Pass the message as mesg= for consistency with the other SynErr raises
    # in this file, which all use the keyword form.
    raise s_exc.SynErr(mesg='Kaboom')
async def agenrboom(self):
    '''
    Async generator that yields two values then raises, for error-path tests.
    '''
    for valu in (10, 20):
        yield valu
    raise s_exc.SynErr(mesg='derp')
async def badRunStormSvcAdd(iden):
    '''
    Test stub that records the iden and then fails.

    Raises:
        s_exc.SynErr: Always.
    '''
    badiden.append(iden)
    # Pass the message as mesg= for consistency with the other SynErr raises
    # in this file, which all use the keyword form.
    raise s_exc.SynErr(mesg='Kaboom')
def main(argv, outp=None):
    '''
    Upload files to an Axon and optionally model them as file:bytes nodes in a Cortex.

    Args:
        argv (list): Command line arguments.
        outp: Optional output object; defaults to a new OutPut.

    Returns:
        int: 0 on completion.

    Raises:
        s_exc.SynErr: If an uploaded file's returned hash does not match the
            locally computed sha256.
    '''
    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    pars = makeargparser()
    opts = pars.parse_args(argv)

    axon = s_telepath.openurl(opts.axon)

    core = None
    if opts.cortex:
        core = s_telepath.openurl(opts.cortex)

    tags = {}
    if opts.tags:
        for tag in opts.tags.split(','):
            tags[tag] = (None, None)

    if tags:
        outp.printf('adding tags: %r' % (list(tags.keys())))

    # Expand globs and keep only regular files.
    filepaths = set()
    for item in opts.filenames:
        paths = glob.glob(item, recursive=opts.recursive)

        if not paths:
            outp.printf(f'filepath does not contain any files: {item}')
            continue

        filepaths.update([path for path in paths if os.path.isfile(path)])

    for path in filepaths:
        bname = os.path.basename(path)

        hset = s_hashset.HashSet()
        with s_common.reqfile(path) as fd:
            hset.eatfd(fd)

        fhashes = {htyp: hasher.hexdigest() for htyp, hasher in hset.hashes}

        sha256 = fhashes.get('sha256')
        bsha256 = s_common.uhex(sha256)

        if not axon.has(bsha256):

            with axon.upload() as upfd:
                with s_common.genfile(path) as fd:
                    for byts in s_common.iterfd(fd):
                        upfd.write(byts)
                size, hashval = upfd.save()

            if hashval != bsha256:  # pragma: no cover
                # Report the expected vs actual hash. The original raise
                # reported hashval twice (hex and raw) and never the expected
                # value. NOTE(review): assuming ehash=expected, ahash=actual.
                raise s_exc.SynErr(mesg='hashes do not match',
                                   ehash=sha256,
                                   ahash=s_common.ehex(hashval))

            outp.printf(f'Uploaded [{bname}] to axon')
        else:
            outp.printf(f'Axon already had [{bname}]')

        if core:
            pnode = (('file:bytes', f'sha256:{sha256}'), {
                'props': {
                    'md5': fhashes.get('md5'),
                    'sha1': fhashes.get('sha1'),
                    'sha256': fhashes.get('sha256'),
                    'size': hset.size,
                    'name': bname,
                },
                'tags': tags,
            })
            node = list(core.addNodes([pnode]))[0]

            iden = node[0][1]
            size = node[1]['props']['size']
            name = node[1]['props']['name']
            mesg = f'file: {bname} ({size}) added to core ({iden}) as {name}'
            outp.printf(mesg)

    s_glob.sync(axon.fini())
    if core:
        s_glob.sync(core.fini())
    return 0
async def main(argv, outp=None):
    '''
    Upload files to an Axon and optionally model them in a Cortex via storm.

    Args:
        argv (list): Command line arguments.
        outp: Optional output object; defaults to a new OutPut.

    Returns:
        int: 0 on completion.

    Raises:
        s_exc.SynErr: If an uploaded file's returned hash does not match the
            locally computed sha256.
    '''
    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    path = s_common.getSynPath('telepath.yaml')
    telefini = await s_telepath.loadTeleEnv(path)

    pars = makeargparser()
    opts = pars.parse_args(argv)

    axon = await s_telepath.openurl(opts.axon)

    core = None
    if opts.cortex:
        core = await s_telepath.openurl(opts.cortex)

    tags = set()
    if opts.tags:
        for tag in opts.tags.split(','):
            tags.add(tag)

    tags = tuple(tags)
    if tags:
        outp.printf(f'adding tags: {tags}')

    # Expand globs and keep only regular files.
    filepaths = set()
    for item in opts.filenames:
        paths = glob.glob(item, recursive=opts.recursive)

        if not paths:
            outp.printf(f'filepath does not contain any files: {item}')
            continue

        filepaths.update([path for path in paths if os.path.isfile(path)])

    for path in filepaths:
        bname = os.path.basename(path)

        hset = s_hashset.HashSet()
        with s_common.reqfile(path) as fd:
            hset.eatfd(fd)

        fhashes = {htyp: hasher.hexdigest() for htyp, hasher in hset.hashes}

        sha256 = fhashes.get('sha256')
        bsha256 = s_common.uhex(sha256)

        if not await axon.has(bsha256):

            async with await axon.upload() as upfd:
                with s_common.genfile(path) as fd:
                    for byts in s_common.iterfd(fd):
                        await upfd.write(byts)
                size, hashval = await upfd.save()

            if hashval != bsha256:  # pragma: no cover
                # Report the expected vs actual hash. The original raise
                # reported hashval twice (hex and raw) and never the expected
                # value. NOTE(review): assuming ehash=expected, ahash=actual.
                raise s_exc.SynErr(mesg='hashes do not match',
                                   ehash=sha256,
                                   ahash=s_common.ehex(hashval))

            outp.printf(f'Uploaded [{bname}] to axon')
        else:
            outp.printf(f'Axon already had [{bname}]')

        if core:
            # Use a distinct name so we don't clobber the parsed CLI opts,
            # which the original code reassigned inside this loop.
            stormopts = {
                'vars': {
                    'md5': fhashes.get('md5'),
                    'sha1': fhashes.get('sha1'),
                    'sha256': fhashes.get('sha256'),
                    'size': hset.size,
                    'name': bname,
                    'tags': tags,
                }
            }
            q = '[file:bytes=$sha256 :md5=$md5 :sha1=$sha1 :size=$size :name=$name] ' \
                '{ for $tag in $tags { [+#$tag] } }'
            msgs = await core.storm(q, opts=stormopts).list()
            node = [m[1] for m in msgs if m[0] == 'node'][0]

            iden = node[0][1]
            size = node[1]['props']['size']
            name = node[1]['props']['name']
            mesg = f'file: {bname} ({size}) added to core ({iden}) as {name}'
            outp.printf(mesg)

    await axon.fini()
    if core:
        await core.fini()
    if telefini:  # pragma: no cover
        await telefini()
    return 0
async def setModelVers(self, vers):
    '''
    Stub override that waits for readiness, then refuses the model version change.
    '''
    await self._readyPlayerOne()
    mesg = 'setModelVers not allowed!'
    raise s_exc.SynErr(mesg=mesg)