Example #1
def main(argv, outp=None):

    if outp is None:
        outp = s_output.OutPut()

    p = getArgParser()
    opts = p.parse_args(argv)

    core = s_telepath.openurl(opts.cortex)

    tags = []
    if opts.tags:
        for tag in opts.tags.split(','):
            tags.append(tag)

    if tags:
        outp.printf('adding tags: %r' % (tags, ))

    for path in opts.filenames:

        with open(path, 'rb') as fd:

            base = os.path.basename(path)
            node = core.formNodeByFd(fd, name=base)

            core.addTufoTags(node, tags)

            iden = node[1].get('file:bytes')
            size = node[1].get('file:bytes:size')
            name = node[1].get('file:bytes:name')

            outp.printf('file: %s (%d) added (%s) as %s' %
                        (base, size, iden, name))
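
Most of the examples in this collection share the same entry-point shape: main(argv, outp=None) builds a default s_output.OutPut() only when the caller does not inject one, which keeps the tools scriptable and testable. A minimal sketch of how such a tool is typically wired up as a script follows; the __main__ guard is an assumption and not part of the example above.

if __name__ == '__main__':  # pragma: no cover
    import sys
    sys.exit(main(sys.argv[1:]))  # exit code is whatever main() returns (an int or None)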
Example #2
def main(argv, outp=None):

    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    pars = getArgParser()
    opts = pars.parse_args(argv)

    for path in opts.paths:

        if not path.endswith('.json'):
            outp.printf('skip: %s (not .json extension)' % (path, ))
            continue

        if not os.path.isfile(path):
            outp.printf('skip: %s (not a file)' % (path, ))
            continue

        base = path[:-5]  # strip the '.json' suffix
        newp = base + '.mpk'

        outp.printf('converting: %s -> .mpk' % (path, ))
        with open(path, 'r', encoding='utf8') as fd:
            with open(newp, 'wb') as pk:
                for line in fd:
                    item = json.loads(line)
                    pk.write(s_msgpack.en(item))

        if opts.rm:
            os.unlink(path)
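
Example #2 converts newline-delimited JSON into a stream of msgpack-encoded objects. For the reverse direction, the resulting .mpk file can be walked with s_msgpack.iterfd, the same helper used for restores in Example #6 below. A short sketch, assuming the usual synapse.lib.msgpack import alias; the file name is illustrative only.

import synapse.lib.msgpack as s_msgpack

with open('data.mpk', 'rb') as fd:       # illustrative path
    for item in s_msgpack.iterfd(fd):    # yields each msgpack-encoded object in order
        print(item)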
Example #3
def main(argv, outp=None):

    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()
    parser = makeargpaser()
    opts = parser.parse_args(argv)

    if not opts.verbose:
        logging.disable(logging.DEBUG)

    if os.path.isfile(opts.output) and not opts.force:
        outp.printf('Cannot overwrite a backup.')
        return 1

    genrows_kwargs = {}
    if opts.extra_args:
        with open(opts.extra_args, 'rb') as fd:
            genrows_kwargs = json.loads(fd.read().decode())

    storconf = {'rev:storage': False}
    if opts.revstorage:
        storconf['rev:storage'] = True

    backup_tufo = gen_backup_tufo(opts)

    with open(opts.output, 'wb') as fd:
        fd.write(s_msgpack.en(backup_tufo))
        with s_cortex.openstore(opts.store, storconf=storconf) as store:
            dump_store(outp, fd, store,
                       compress=opts.compress,
                       dump_blobstore=opts.dump_blobstore,
                       genrows_kwargs=genrows_kwargs)

    outp.printf('Fin')
    return 0
Example #4
def main(argv, outp=None):

    if outp is None:
        outp = s_output.OutPut()

    pars = argparse.ArgumentParser(prog='autodoc', description=descr)

    #pars.add_argument('--format', default='rst')
    pars.add_argument('--cortex', default='ram://', help='Cortex URL for model inspection')
    pars.add_argument('--doc-model', action='store_true', default=False, help='Generate RST docs for the DataModel within a cortex')
    pars.add_argument('--configable-opts', action='store_true', default=False, help='Generate RST docs of the Configable classes in Synapse.')
    pars.add_argument('--savefile', default=None, help='Save output to the given file')

    opts = pars.parse_args(argv)
    fd = None
    if opts.savefile:
        fd = open(opts.savefile, 'wb')
        outp = s_output.OutPutFd(fd)

    if opts.doc_model:
        with s_cortex.openurl(opts.cortex) as core:
            return docModel(outp, fd, core)

    if opts.configable_opts:
        return docConfigables(outp, fd)
Example #5
async def main(argv, outp=None):

    if outp is None:
        outp = s_output.OutPut()

    pars = makeargparser()

    opts = pars.parse_args(argv)

    if opts.doc_model:

        if opts.cortex:
            async with await s_telepath.openurl(opts.cortex) as core:
                rsttypes, rstforms = await docModel(outp, core)

        else:
            async with s_cortex.getTempCortex() as core:
                rsttypes, rstforms = await docModel(outp, core)

        if opts.savedir:
            with open(s_common.genpath(opts.savedir, 'datamodel_types.rst'),
                      'wb') as fd:
                fd.write(rsttypes.getRstText().encode())
            with open(s_common.genpath(opts.savedir, 'datamodel_forms.rst'),
                      'wb') as fd:
                fd.write(rstforms.getRstText().encode())

    return 0
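
The async variants (Examples #5, #7, #8, #11, #14, #19 and #23) define main as a coroutine, so a synchronous caller has to drive the event loop itself. A minimal sketch using only the standard library follows; Synapse's own launchers may wire this up differently.

import asyncio
import sys

if __name__ == '__main__':  # pragma: no cover
    sys.exit(asyncio.run(main(sys.argv[1:])))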
Example #6
def main(argv, outp=None):

    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()
    parser = makeargpaser()
    opts = parser.parse_args(argv)

    if not opts.verbose:
        logging.disable(logging.DEBUG)

    # Check to see if we're working with a savefile or a dumprows file
    decompress = False
    discard_first_event = False
    with open(opts.input, 'rb') as fd:
        gen = s_msgpack.iterfd(fd)
        tufo0 = next(gen)
        if tufo0[0] == 'syn:cortex:rowdump:info':
            outp.printf('Restoring from a dumprows file.')
            discard_first_event = True
            decompress = tufo0[1].get('rows:compress')
            if decompress:
                outp.printf('Gzip row compression enabled.')
        else:
            outp.printf('Restoring from a savefile')
        # Drop the generator so it no longer holds a dangling reference to fd
        del gen

    storconf = {'rev:storage': False}
    if opts.revstorage:  # pragma: no cover
        storconf['rev:storage'] = True

    with open(opts.input, 'rb') as fd:
        gen = s_msgpack.iterfd(fd)
        if discard_first_event:
            next(gen)
        with s_cortex.openstore(opts.store, storconf=storconf) as store:
            outp.printf('Starting row level restore')
            tick = time.time()
            i = 0
            nrows = 0
            for event in gen:
                if decompress and 'rows' in event[1]:
                    event[1]['rows'] = s_msgpack.un(
                        gzip.decompress(event[1].get('rows')))
                i += 1
                if i % 250 == 0:
                    outp.printf('Loaded {} events'.format(i))
                store.loadbus.dist(event)
                _nrows = len(event[1].get('rows', ()))
                nrows += _nrows
                if _nrows and i % 10 == 0:
                    logger.debug('Loaded %s rows', nrows)

            tock = time.time()
            outp.printf('Done loading events - took {} seconds.'.format(tock - tick))
    outp.printf('Fin')
    return 0
Example #7
async def main(argv, outp=None):
    if outp is None:
        outp = s_output.OutPut()

    pars = makeargparser()

    opts = pars.parse_args(argv)

    if opts.doc_model:

        if opts.cortex:
            async with await s_telepath.openurl(opts.cortex) as core:
                rsttypes, rstforms = await docModel(outp, core)

        else:
            async with s_cortex.getTempCortex() as core:
                rsttypes, rstforms = await docModel(outp, core)

        if opts.savedir:
            with open(s_common.genpath(opts.savedir, 'datamodel_types.rst'), 'wb') as fd:
                fd.write(rsttypes.getRstText().encode())
            with open(s_common.genpath(opts.savedir, 'datamodel_forms.rst'), 'wb') as fd:
                fd.write(rstforms.getRstText().encode())

    if opts.doc_conf:
        confdocs, cname = await docConfdefs(opts.doc_conf,
                                            reflink=opts.doc_conf_reflink,
                                            doc_title=opts.doc_conf_title,
                                            )

        if opts.savedir:
            with open(s_common.genpath(opts.savedir, f'conf_{cname.lower()}.rst'), 'wb') as fd:
                fd.write(confdocs.getRstText().encode())

    if opts.doc_storm:
        confdocs, svcname = await docStormsvc(opts.doc_storm)

        if opts.savedir:
            with open(s_common.genpath(opts.savedir, f'stormsvc_{svcname.lower()}.rst'), 'wb') as fd:
                fd.write(confdocs.getRstText().encode())

    if opts.doc_stormpkg:
        pkgdocs, pkgname = await docStormpkg(opts.doc_stormpkg)

        if opts.savedir:
            with open(s_common.genpath(opts.savedir, f'stormpkg_{pkgname.lower()}.rst'), 'wb') as fd:
                fd.write(pkgdocs.getRstText().encode())

    if opts.doc_stormtypes:
        libdocs, typedocs = await docStormTypes()
        if opts.savedir:
            with open(s_common.genpath(opts.savedir, 'stormtypes_libs.rst'), 'wb') as fd:
                fd.write(libdocs.getRstText().encode())
            with open(s_common.genpath(opts.savedir, 'stormtypes_prims.rst'), 'wb') as fd:
                fd.write(typedocs.getRstText().encode())

    return 0
Example #8
async def main(argv, outp=None):

    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    pars = makeargparser()
    opts = pars.parse_args(argv)

    if opts.offset:
        if len(opts.files) > 1:
            outp.printf(
                'Cannot start from an arbitrary offset for more than 1 file.')
            return 1

        outp.printf(
            f'Starting from offset [{opts.offset}] - it may take a while'
            f' to get to that location in the input file.')

    if opts.test:
        async with s_cortex.getTempCortex(mods=opts.modules) as prox:
            await addFeedData(prox,
                              outp,
                              opts.format,
                              opts.debug,
                              chunksize=opts.chunksize,
                              offset=opts.offset,
                              *opts.files)

    elif opts.cortex:
        async with await s_telepath.openurl(opts.cortex) as core:
            try:
                s_version.reqVersion(core._getSynVers(), reqver)
            except s_exc.BadVersion as e:
                valu = s_version.fmtVersion(*e.get('valu'))
                outp.printf(
                    f'Cortex version {valu} is outside of the feed tool supported range ({reqver}).'
                )
                outp.printf(
                    f'Please use a version of Synapse which supports {valu}; '
                    f'current version is {s_version.verstring}.')
                return 1
            await addFeedData(core,
                              outp,
                              opts.format,
                              opts.debug,
                              chunksize=opts.chunksize,
                              offset=opts.offset,
                              *opts.files)

    else:  # pragma: no cover
        outp.printf('No valid options provided [%s]' % (opts,))
        return 1

    return 0
Example #9
def main(argv, outp=None):

    if outp is None:
        outp = s_output.OutPut()

    p = getArgParser()
    opts = p.parse_args(argv)

    log_level = os.getenv('SYN_DMON_LOG_LEVEL', opts.log_level)
    if log_level:  # pragma: no cover
        log_level = log_level.upper()
        if log_level not in LOG_LEVEL_CHOICES:
            raise ValueError(
                'Invalid log level provided: {}'.format(log_level))
        logging.basicConfig(
            level=log_level,
            format='%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(funcName)s]',
        )
        logger.info('log level set to ' + log_level)

    if opts.lsboot:
        for path in lsboot():
            outp.printf(path)
        return

    if opts.onboot:
        plat = s_thishost.get('platform')
        if plat not in ('linux', 'darwin'):
            raise Exception('--onboot does not support platform: %s' %
                            (plat, ))

        for path in opts.configs:
            logger.info('onboot add: %s' % (path, ))
            onboot(path)

        return

    if opts.noboot:
        for path in opts.configs:
            logger.info('onboot del: %s' % (path, ))
            noboot(path)
        return

    dmon = s_daemon.Daemon()

    if opts.asboot:
        dmon.loadDmonFile(cfgfile)

    for path in opts.configs:
        dmon.loadDmonFile(path)

    dmon.main()
Example #10
    def __init__(self, outp=None):
        EventBus.__init__(self)

        if outp is None:
            outp = s_output.OutPut()

        self.outp = outp

        self.cmds = {}
        self.cmdprompt = 'cli> '

        self.addCmdClass(CmdHelp)
        self.addCmdClass(CmdQuit)
Example #11
async def main(argv, outprint=None):
    if outprint is None:   # pragma: no cover
        outprint = s_output.OutPut()
    global outp
    outp = outprint

    pars = makeargparser()
    try:
        opts = pars.parse_args(argv)
    except s_exc.ParserExit:
        return -1

    return await opts.func(opts)
Example #12
def main(argv, outp=None):
    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    pars = makeargpaser()
    opts = pars.parse_args(argv)

    if opts.module:
        mod = s_dyndeps.tryDynMod(opts.module)
        outp.printf(f'Loaded {opts.module}@{mod}')

    if opts.cells:
        outp.printf('Registered cells:')
        for cname, cpath in s_cells.getCells():
            outp.printf(f'{cname:<10} {cpath:>10}')
        return 0

    dirn = s_common.genpath(opts.dmonpath, 'cells', opts.cellname)
    if os.path.isdir(dirn):
        outp.printf(f'cell directory already exists: {dirn}')
        return 1

    dmon = {}
    if opts.listen:
        dmon['listen'] = opts.listen

    if opts.module:
        dmon['modules'] = [opts.module]

    if dmon:
        dmon.setdefault('modules', [])
        dmon_fp = os.path.join(opts.dmonpath, 'dmon.yaml')
        if os.path.exists(dmon_fp):
            outp.printf(f'Cannot overwrite existing dmon.yaml file. [{dmon_fp}]')
            return 1
        s_common.yamlsave(dmon, dmon_fp)

    boot = {
        'cell:name': opts.cellname,
    }

    if opts.auth:
        boot['auth:en'] = True

    if opts.admin:
        boot['auth:en'] = True
        boot['auth:admin'] = opts.admin

    outp.printf(f'Deploying a {opts.celltype} at: {dirn}')
    s_cells.deploy(opts.celltype, dirn, boot)
    return 0
Example #13
    def __init__(self, item, outp=None, **locs):
        EventBus.__init__(self)

        if outp is None:
            outp = s_output.OutPut()

        self.outp = outp
        self.locs = locs
        self.item = item  # whatever object we are commanding

        self.cmds = {}
        self.cmdprompt = 'cli> '

        self.addCmdClass(CmdHelp)
        self.addCmdClass(CmdQuit)
Example #14
async def main(argv, outp=None):

    pars = setup()
    opts = pars.parse_args(argv)

    path = s_common.getSynPath('telepath.yaml')
    telefini = await s_telepath.loadTeleEnv(path)

    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    if opts.output is None:
        opts.output = '.'

    outdir = pathlib.Path(opts.output)

    s_common.gendir(opts.output)

    async with await s_telepath.openurl(opts.axon) as axon:

        # reminder: these are the hashes *not* available

        awants = await axon.wants([s_common.uhex(h) for h in opts.hashes])
        for a in awants:
            outp.printf(f'{s_common.ehex(a)} not in axon store')

        exists = [h for h in opts.hashes if s_common.uhex(h) not in awants]

        for h in exists:

            try:
                outp.printf(f'Fetching {h} to file')

                with open(outdir.joinpath(h), 'wb') as fd:
                    async for b in axon.get(s_common.uhex(h)):
                        fd.write(b)

                outp.printf(f'Fetched {h} to file')

            except Exception as e:
                outp.printf('Error: Hit Exception: %s' % (str(e), ))
                continue

    if telefini:  # pragma: no cover
        await telefini()

    return 0
Example #15
def main(argv, outp=None):

    if outp is None:
        outp = s_output.OutPut()

    p = getArgParser()
    opts = p.parse_args(argv)

    if opts.log_level:
        logging.basicConfig(level=opts.log_level.upper())
        logger.info('log level set to ' + opts.log_level)

    if opts.lsboot:
        for path in lsboot():
            outp.printf(path)
        return

    if opts.onboot:
        plat = s_thishost.get('platform')
        if plat not in ('linux', 'darwin'):
            raise Exception('--onboot does not support platform: %s' %
                            (plat, ))

        for path in opts.configs:
            logger.info('onboot add: %s' % (path, ))
            onboot(path)

        return

    if opts.noboot:
        for path in opts.configs:
            logger.info('onboot del: %s' % (path, ))
            noboot(path)
        return

    dmon = s_daemon.Daemon()

    if opts.asboot:
        dmon.loadDmonFile(cfgfile)

    for path in opts.configs:
        dmon.loadDmonFile(path)

    dmon.main()
Example #16
def main(argv, outp=None):

    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    pars = makeargparser()
    opts = pars.parse_args(argv)

    if opts.offset:
        if len(opts.files) > 1:
            outp.printf(
                'Cannot start from an arbitrary offset for more than 1 file.')
            return 1

        outp.printf(
            f'Starting from offset [{opts.offset}] - it may take a while'
            f' to get to that location in the input file.')

    if opts.test:
        with s_cortex.getTempCortex(opts.modules) as prox:
            addFeedData(prox,
                        outp,
                        opts.format,
                        opts.debug,
                        chunksize=opts.chunksize,
                        offset=opts.offset,
                        *opts.files)

    elif opts.cortex:
        with s_telepath.openurl(opts.cortex) as core:
            addFeedData(core,
                        outp,
                        opts.format,
                        opts.debug,
                        chunksize=opts.chunksize,
                        offset=opts.offset,
                        *opts.files)

    else:  # pragma: no cover
        outp.printf('No valid options provided [%s]' % (opts,))
        return 1

    return 0
Example #17
def main(argv, outp=None):

    pars = setup()
    opts = pars.parse_args(argv)

    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    if opts.output is None:
        opts.output = '.'

    outdir = pathlib.Path(opts.output)

    s_common.gendir(opts.output)

    with s_telepath.openurl(opts.axon) as axon:

        # reminder: these are the hashes *not* available
        awants = axon.wants([binascii.unhexlify(h) for h in opts.hashes])
        for a in awants:
            outp.printf(f'{binascii.hexlify(a).decode()} not in axon store')

        exists = [
            h for h in opts.hashes if binascii.unhexlify(h) not in awants
        ]

        for h in exists:

            try:
                outp.printf(f'Fetching {h} to file')

                with open(outdir.joinpath(h), 'wb') as fd:
                    for b in axon.get(binascii.unhexlify(h)):
                        fd.write(b)

                outp.printf(f'Fetched {h} to file')

            except Exception as e:
                outp.printf('Error: Hit Exception: %s' % (str(e), ))
                continue

    return 0
Example #18
    async def __anit__(self, item, outp=None, **locs):

        await s_base.Base.__anit__(self)

        # Tell prompt_toolkit to use the asyncio event loop.
        use_asyncio_event_loop()

        if outp is None:
            outp = s_output.OutPut()

        self.outp = outp
        self.locs = locs
        self.cmdtask = None  # type: asyncio.Task

        self.sess = None
        self.vi_mode = _inputrc_enables_vi_mode()

        self.item = item  # whatever object we are commanding

        self.echoline = False
        self.colorsenabled = False

        if isinstance(self.item, s_base.Base):
            self.item.onfini(self._onItemFini)

        self.locs['syn:local:version'] = s_version.verstring

        if isinstance(self.item, s_telepath.Proxy):
            version = self.item._getSynVers()
            if version is None:  # pragma: no cover
                self.locs['syn:remote:version'] = 'Remote Synapse version unavailable'
            else:
                self.locs['syn:remote:version'] = '.'.join(
                    [str(v) for v in version])

        self.cmds = {}
        self.cmdprompt = 'cli> '

        self.addCmdClass(CmdHelp)
        self.addCmdClass(CmdQuit)
        self.addCmdClass(CmdLocals)
Example #19
async def main(argv, outprint=None):
    if outprint is None:  # pragma: no cover
        outprint = s_output.OutPut()
    global outp
    outp = outprint

    async with contextlib.AsyncExitStack() as cm:

        teleyaml = s_common.getSynPath('telepath.yaml')
        if os.path.isfile(teleyaml):
            fini = await s_telepath.loadTeleEnv(teleyaml)
            cm.push_async_callback(fini)

        pars = makeargparser()
        try:
            opts = pars.parse_args(argv)
        except s_exc.ParserExit:
            return -1

        retn = await opts.func(opts)

    return retn
Example #20
    def __init__(self, item, outp=None, **locs):
        s_eventbus.EventBus.__init__(self)

        if outp is None:
            outp = s_output.OutPut()

        self.outp = outp
        self.locs = locs
        self.item = item    # whatever object we are commanding

        self.echoline = False
        self.finikill = False
        self.loopthread = None

        if isinstance(item, (s_base.Base, s_eventbus.EventBus)):
            self.item.onfini(self._onItemFini)

        self.cmds = {}
        self.cmdprompt = 'cli> '

        self.addCmdClass(CmdHelp)
        self.addCmdClass(CmdQuit)
        self.addCmdClass(CmdLocals)
Example #21
def main(argv, outp=None):
    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    p = makeargpaser()
    opts = p.parse_args(args=argv)

    results = []
    for fp in opts.input:
        try:
            guid, hashd = compute_hashes(fp=fp)
        except Exception:
            outp.printf('Failed to compute superhash for {}'.format(fp))
        else:
            results.append((fp, guid, hashd))

    if opts.ingest:
        ret = []
        for fp, guid, hashd in results:
            hashd['name'] = os.path.basename(fp)
            d = {"props": hashd}
            hl = [guid, d]
            ret.append(hl)
        if len(ret) == 1:
            ret = ret[0]
        outp.printf(json.dumps(ret, sort_keys=True, indent=2))
    else:
        for fp, guid, hashd in results:
            outp.printf('Superhash for: {}'.format(fp))
            hashd['guid'] = guid
            keys = list(hashd.keys())
            keys.sort()
            for key in keys:
                value = hashd.get(key)
                outp.printf('{}\t{}'.format(key, value))

    return 0
Example #22
def main(argv, outp=None):

    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    pars = makeargparser()
    opts = pars.parse_args(argv)

    axon = s_telepath.openurl(opts.axon)

    core = None
    if opts.cortex:
        core = s_telepath.openurl(opts.cortex)

        tags = {}
        if opts.tags:
            for tag in opts.tags.split(','):
                tags[tag] = (None, None)

        if tags:
            outp.printf('adding tags: %r' % (list(tags.keys())))

    filepaths = set()
    for item in opts.filenames:
        paths = glob.glob(item, recursive=opts.recursive)

        if not paths:
            outp.printf(f'filepath does not contain any files: {item}')
            continue

        filepaths.update([path for path in paths if os.path.isfile(path)])

    for path in filepaths:

        bname = os.path.basename(path)

        hset = s_hashset.HashSet()
        with s_common.reqfile(path) as fd:
            hset.eatfd(fd)

        fhashes = {htyp: hasher.hexdigest() for htyp, hasher in hset.hashes}

        sha256 = fhashes.get('sha256')
        bsha256 = s_common.uhex(sha256)

        if not axon.has(bsha256):

            with axon.upload() as upfd:

                with s_common.genfile(path) as fd:
                    for byts in s_common.iterfd(fd):
                        upfd.write(byts)

                size, hashval = upfd.save()

            if hashval != bsha256:  # pragma: no cover
                raise s_exc.SynErr(mesg='hashes do not match',
                                   ehash=s_common.ehex(hashval),
                                   ahash=hashval)

            outp.printf(f'Uploaded [{bname}] to axon')
        else:
            outp.printf(f'Axon already had [{bname}]')

        if core:
            pnode = (('file:bytes', f'sha256:{sha256}'), {
                'props': {
                    'md5': fhashes.get('md5'),
                    'sha1': fhashes.get('sha1'),
                    'sha256': fhashes.get('sha256'),
                    'size': hset.size,
                    'name': bname,
                },
                'tags': tags,
            })

            node = list(core.addNodes([pnode]))[0]

            iden = node[0][1]
            size = node[1]['props']['size']
            name = node[1]['props']['name']
            mesg = f'file: {bname} ({size}) added to core ({iden}) as {name}'
            outp.printf(mesg)

    s_glob.sync(axon.fini())
    if core:
        s_glob.sync(core.fini())
    return 0
Example #23
async def main(argv, outp=None):

    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    path = s_common.getSynPath('telepath.yaml')
    telefini = await s_telepath.loadTeleEnv(path)

    pars = makeargparser()
    opts = pars.parse_args(argv)

    axon = await s_telepath.openurl(opts.axon)

    core = None
    if opts.cortex:
        core = await s_telepath.openurl(opts.cortex)

        tags = set()
        if opts.tags:
            for tag in opts.tags.split(','):
                tags.add(tag)

        tags = tuple(tags)
        if tags:
            outp.printf(f'adding tags: {tags}')

    filepaths = set()
    for item in opts.filenames:
        paths = glob.glob(item, recursive=opts.recursive)

        if not paths:
            outp.printf(f'filepath does not contain any files: {item}')
            continue

        filepaths.update([path for path in paths if os.path.isfile(path)])

    for path in filepaths:

        bname = os.path.basename(path)

        hset = s_hashset.HashSet()
        with s_common.reqfile(path) as fd:
            hset.eatfd(fd)

        fhashes = {htyp: hasher.hexdigest() for htyp, hasher in hset.hashes}

        sha256 = fhashes.get('sha256')
        bsha256 = s_common.uhex(sha256)

        if not await axon.has(bsha256):

            async with await axon.upload() as upfd:

                with s_common.genfile(path) as fd:
                    for byts in s_common.iterfd(fd):
                        await upfd.write(byts)

                size, hashval = await upfd.save()

            if hashval != bsha256:  # pragma: no cover
                raise s_exc.SynErr(mesg='hashes do not match',
                                   ehash=s_common.ehex(hashval),
                                   ahash=hashval)

            outp.printf(f'Uploaded [{bname}] to axon')
        else:
            outp.printf(f'Axon already had [{bname}]')

        if core:
            stormopts = {
                'vars': {
                    'md5': fhashes.get('md5'),
                    'sha1': fhashes.get('sha1'),
                    'sha256': fhashes.get('sha256'),
                    'size': hset.size,
                    'name': bname,
                    'tags': tags,
                }
            }

            q = '[file:bytes=$sha256 :md5=$md5 :sha1=$sha1 :size=$size :name=$name] ' \
                '{ for $tag in $tags { [+#$tag] } }'

            msgs = await core.storm(q, opts=stormopts).list()
            node = [m[1] for m in msgs if m[0] == 'node'][0]

            iden = node[0][1]
            size = node[1]['props']['size']
            name = node[1]['props']['name']
            mesg = f'file: {bname} ({size}) added to core ({iden}) as {name}'
            outp.printf(mesg)

    await axon.fini()
    if core:
        await core.fini()

    if telefini:  # pragma: no cover
        await telefini()

    return 0
Example #24
def main(argv, outp=None):

    if outp is None:
        outp = s_output.OutPut()

    pars = argparse.ArgumentParser(prog='easycert', description=descr)

    pars.add_argument('--certdir',
                      default='~/.syn/certs',
                      help='Directory for certs/keys')
    pars.add_argument('--signas',
                      help='sign the new cert with the given cert name')
    pars.add_argument('--ca',
                      default=False,
                      action='store_true',
                      help='mark the certificate as a CA/CRL signer')
    pars.add_argument('--server',
                      default=False,
                      action='store_true',
                      help='mark the certificate as a server')
    pars.add_argument('--server-sans',
                      help='server cert subject alternate names')
    pars.add_argument('--csr',
                      default=False,
                      action='store_true',
                      help='generate a cert signing request')
    pars.add_argument('--sign-csr',
                      default=False,
                      action='store_true',
                      help='sign a cert signing request')
    pars.add_argument(
        'name',
        help='common name for the certificate (or filename for CSR signing)')

    opts = pars.parse_args(argv)

    cdir = s_certdir.CertDir(path=opts.certdir)

    try:

        if opts.sign_csr:

            if opts.signas is None:
                outp.printf('--sign-csr requires --signas')
                return -1

            xcsr = cdir._loadCsrPath(opts.name)
            if xcsr is None:
                outp.printf('csr not found: %s' % (opts.name, ))
                return -1

            if opts.server:
                cdir.signHostCsr(xcsr, opts.signas, outp=outp)
                return 0

            cdir.signUserCsr(xcsr, opts.signas, outp=outp)
            return 0

        if opts.csr:

            if opts.ca:
                # CA CSRs are not supported; fail before generating anything.
                raise Exception('CSR for CA cert not supported (yet)')

            if opts.server:
                cdir.genHostCsr(opts.name, outp=outp)
                return 0

            cdir.genUserCsr(opts.name, outp=outp)
            return 0

        if opts.ca:
            cdir.genCaCert(opts.name, signas=opts.signas, outp=outp)
            return 0

        if opts.server:
            cdir.genHostCert(opts.name,
                             signas=opts.signas,
                             outp=outp,
                             sans=opts.server_sans)
            return 0

        cdir.genUserCert(opts.name, signas=opts.signas, outp=outp)
        return 0

    except s_common.DupFileName as e:
        outp.printf('file exists: %s' % (e.errinfo.get('path'), ))
        return -1
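
Because Example #24 builds its argument parser inline, valid invocations can be read directly off the add_argument calls above. A few illustrative calls follow; the certificate names are placeholders, not values from the original code.

main(['--ca', 'myca'])                                      # generate a CA cert and key
main(['--server', '--signas', 'myca', 'www.example.com'])   # generate and sign a host cert
main(['--csr', 'user@example.com'])                         # generate a user cert signing request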
Example #25
def main(argv, outp=None):
    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    outp.printf(s_common.guid())
Example #26
    def test_output(self):
        outp = s_output.OutPut()
        outp.printf('foo')
        outp.printf('bar')
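
The test above only checks that printf() can be called on a default OutPut. Because every tool in this collection accepts outp as an injection point and only ever calls printf() on it, a capturing stand-in makes output assertions straightforward. The subclass below is a hypothetical sketch, not a class that ships with Synapse.

import synapse.lib.output as s_output

class CaptureOutPut(s_output.OutPut):
    '''Hypothetical test helper: record every printf() message in memory.'''

    def __init__(self):
        s_output.OutPut.__init__(self)
        self.mesgs = []

    def printf(self, mesg, addnl=True):
        self.mesgs.append(mesg)

# usage sketch:
#   outp = CaptureOutPut()
#   main(argv, outp=outp)
#   assert any('foo' in mesg for mesg in outp.mesgs)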
Example #27
def main(argv, outp=None):

    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    pars = argparse.ArgumentParser(prog='ingest', description='Command line tool for ingesting data into a cortex')

    pars.add_argument('--core', default='ram://', help='Cortex to use for ingest deconfliction')
    pars.add_argument('--progress', default=False, action='store_true', help='Print loading progress')
    pars.add_argument('--sync', default=None, help='Sync to an additional cortex')
    pars.add_argument('--save', default=None, help='Save cortex sync events to a file')
    pars.add_argument('--debug', default=False, action='store_true', help='Drop to interactive prompt to inspect cortex')
    pars.add_argument('--verbose', default=False, action='store_true', help='Show changes to local cortex incrementally')
    pars.add_argument('files', nargs='*', help='JSON ingest definition files')

    opts = pars.parse_args(argv)

    core = s_cortex.openurl(opts.core)
    try:
        s_telepath.reqNotProxy(core)
    except s_common.MustBeLocal:
        outp.printf('Ingest requires a local cortex to deconflict against, not a Telepath proxy')
        raise
    core.setConfOpt('enforce', 1)

    if opts.debug:  # pragma: no cover
        core.setConfOpt('log:save', 1)

    # The cortex must be local (not a Telepath proxy) because the ingest
    # uses the transaction API; reqNotProxy() above enforces that.

    def _print_tufo_add(mesg):
        tufo = mesg[1].get('node')
        form = tufo[1].get('tufo:form')
        outp.printf('add: %s=%s' % (form, tufo[1].get(form)))
        for prop, valu in sorted(s_tufo.props(tufo).items()):
            outp.printf('       :%s = %s' % (prop, valu))

    def _print_tufo_tag_add(mesg):
        tag = mesg[1].get('tag')
        tufo = mesg[1].get('node')
        form = tufo[1].get('tufo:form')
        outp.printf('tag: %s=%s (%s)' % (form, tufo[1].get(form), tag))

    progtot = collections.defaultdict(int)
    proglast = collections.defaultdict(int)

    proglocs = {'tick': None, 'datatot': 0, 'datalast': 0, 'tufotot': 0, 'tufolast': 0}

    def onGestData(mesg):
        proglocs['datatot'] += 1
        proglocs['datalast'] += 1

    def onNodeAdd(mesg):
        proglocs['tufotot'] += 1
        proglocs['tufolast'] += 1

    # callback for displaying progress...
    def onGestProg(mesg):

        act = mesg[1].get('act')

        progtot[act] += 1
        proglast[act] += 1

        progtot['total'] += 1
        proglast['total'] += 1

        progtick = proglocs.get('tick')
        if progtick is None:
            proglocs['tick'] = time.time()
            return

        if progtick is not None:

            now = time.time()
            delta = now - progtick

            if delta >= 1.0:

                tot = sum(proglast.values())
                persec = int(float(tot) / delta)
                tot = proglast.get('total', 0)

                datatot = proglocs.get('datatot', 0)
                datalast = proglocs.get('datalast', 0)
                datasec = int(float(datalast) / delta)

                tufotot = proglocs.get('tufotot', 0)
                tufolast = proglocs.get('tufolast', 0)
                tufosec = int(float(tufolast) / delta)

                totstat = tuple(sorted(progtot.items()))
                laststat = tuple(sorted(proglast.items()))

                totstr = ' '.join(['%s=%s' % (n, v) for (n, v) in totstat])
                laststr = ' '.join(['%s=%s' % (n, v) for (n, v) in laststat])

                outp.printf('data: %s %s/sec (%d) nodes: %s %s/sec (%d)' % (datalast, datasec, datatot, tufolast, tufosec, tufotot))

                proglast.clear()
                proglocs['tick'] = time.time()
                proglocs['datalast'] = 0
                proglocs['tufolast'] = 0

    if opts.save:
        outp.printf('saving sync events to: %s' % (opts.save,))
        core.addSpliceFd(s_common.genfile(opts.save))

    if opts.verbose:
        core.on('node:add', _print_tufo_add)
        core.on('node:tag:add', _print_tufo_tag_add)

    pump = None
    if opts.sync is not None:
        sync = s_cortex.openurl(opts.sync)
        pump = core.getSplicePump(sync)

    tick = time.time()

    with core.getCoreXact() as xact:

        for path in opts.files:
            gest = s_ingest.loadfile(path)

            if opts.progress:
                core.on('node:add', onNodeAdd)
                gest.on('gest:data', onGestData)
                gest.on('gest:prog', onGestProg)

            gest.ingest(core)

    tock = time.time()

    outp.printf('ingest took: %s sec' % (tock - tick,))

    if opts.debug:  # pragma: no cover
        s_cmdr.runItemCmdr(core)

    if pump is not None:
        pump.done()
        outp.printf('waiting on sync pump...')
        pump.waitfini()

    return 0
Example #28
def main(argv, outp=None):

    if outp is None:
        outp = s_output.OutPut()

    pars = argparse.ArgumentParser(prog='autodoc', description=descr)

    #pars.add_argument('--format', default='rst')
    pars.add_argument('--cortex',
                      default='ram://',
                      help='Cortex URL for model inspection')
    pars.add_argument(
        '--doc-model',
        action='store_true',
        default=False,
        help='Generate RST docs for the DataModel within a cortex')
    pars.add_argument(
        '--configable-opts',
        action='store_true',
        default=False,
        help='Generate RST docs of the Configable classes in Synapse.')
    pars.add_argument('--savefile',
                      default=None,
                      help='Save output to the given file')

    opts = pars.parse_args(argv)
    fd = None
    if opts.savefile:
        fd = open(opts.savefile, 'wb')
        outp = s_output.OutPutFd(fd)

    core = s_cortex.openurl(opts.cortex)

    if opts.doc_model:

        forms = []
        types = []

        props = collections.defaultdict(list)

        for tufo in core.getTufosByProp('syn:type'):
            name = tufo[1].get('syn:type')
            info = s_tufo.props(tufo)
            types.append((name, info))

        for tufo in core.getTufosByProp('syn:form'):
            name = tufo[1].get('syn:form')
            info = s_tufo.props(tufo)
            forms.append((name, info))

        for tufo in core.getTufosByProp('syn:prop'):
            prop = tufo[1].get('syn:prop')
            form = tufo[1].get('syn:prop:form')
            info = s_tufo.props(tufo)
            props[form].append((prop, info))

        types.sort()
        forms.sort()

        for v in props.values():
            v.sort()

        rst = RstHelp()
        rst.addHead('Synapse Data Model', lvl=0)

        rst.addHead('Types', lvl=1)

        for name, info in types:

            rst.addHead(name, lvl=2)
            inst = core.getTypeInst(name)

            ex = inst.get('ex')
            doc = inst.get('doc')

            if doc is not None:
                rst.addLines(doc)

            bases = core.getTypeBases(name)
            rst.addLines('', 'Type Hierarchy: %s' % (' -> '.join(bases), ), '')

            if ex is not None:

                #valu = core.getTypeParse(name,ex)
                #vrep = reprvalu(valu)

                rst.addLines('', 'Examples:', '')
                rst.addLines('- repr mode: %s' % (repr(ex), ))
                #rst.addLines('- system mode: %s' % (vrep,))
                rst.addLines('')

            cons = []
            xforms = []

            if core.isSubType(name, 'str'):

                regex = inst.get('regex')
                if regex is not None:
                    cons.append('- regex: %s' % (regex, ))

                lower = inst.get('lower')
                if lower:
                    xforms.append('- case: lower')

                restrip = inst.get('restrip')
                if restrip is not None:
                    xforms.append('- regex strip: %s' % (restrip, ))

                nullval = inst.get('nullval')
                if nullval is not None:
                    cons.append('- null value: %s' % (nullval, ))

            if core.isSubType(name, 'int'):

                minval = inst.get('min')
                if minval is not None:
                    cons.append('- min value: %d (0x%x)' % (minval, minval))

                maxval = inst.get('max')
                if maxval is not None:
                    cons.append('- max value: %d (0x%x)' % (maxval, maxval))

                ismin = inst.get('ismin')
                if ismin is not None:
                    xforms.append('- is minimum: True')

                ismax = inst.get('ismax')
                if ismax is not None:
                    xforms.append('- is maximum: True')

            if core.isSubType(name, 'sepr'):

                sep = inst.get('sep')
                fields = inst.get('fields')

                parts = []
                for part in fields.split('|'):
                    name, stype = part.split(',')
                    parts.append(stype)

                seprs = sep.join(['<%s>' % p for p in parts])
                rst.addLines('', 'Sepr Fields: %s' % (seprs, ))

            if cons:
                cons.append('')
                rst.addLines('', 'Type Constraints:', '', *cons)

            if xforms:
                xforms.append('')
                rst.addLines('', 'Type Transforms:', '', *xforms)

        rst.addHead('Forms', lvl=1)

        for name, info in forms:
            ftype = info.get('ptype', 'str')
            rst.addHead('%s = <%s>' % (name, ftype), lvl=2)

            doc = core.getPropInfo(name, 'doc')
            if doc is not None:
                rst.addLines(doc)

            rst.addLines('', 'Properties:', '')

            for prop, pnfo in props.get(name, ()):

                # use the resolver funcs that will recurse upward
                pex = core.getPropInfo(prop, 'ex')
                pdoc = core.getPropInfo(prop, 'doc')

                ptype = pnfo.get('ptype')

                pline = '\t- %s = <%s>' % (prop, ptype)

                defval = pnfo.get('defval')
                if defval is not None:
                    pline += ' (default: %r)' % (defval, )

                rst.addLines(pline)

                if pdoc:
                    rst.addLines('\t\t- %s' % (pdoc, ))

        outp.printf(rst.getRstText())
        return 0

    if opts.configable_opts:
        rst = RstHelp()
        rst.addHead('Synapse Configable Classes', lvl=0)
        rst.addLines(
            'The following objects are Configable objects. They have'
            ' settings which may be provided at runtime or during object'
            ' initialization which may change their behavior.')
        basename = 'synapse'
        confdetails = collections.defaultdict(list)

        for root, dirs, files in os.walk(base_synaspe_dir):

            if any([v for v in dir_skips if v in root]):
                continue

            for fn in files:
                if fn in fn_skips:
                    continue
                if not fn.endswith('.py'):
                    continue

                modname = fn.rsplit('.', 1)[0]
                _modpath = root[len(base_synaspe_dir) + 1:].replace(
                    os.sep, '.')
                modpath = '.'.join(
                    [v for v in [basename, _modpath, modname] if v])

                mod = importlib.import_module(modpath)
                for modattr, valu, name, results in inspect_mod(
                        mod, cls=s_config.Configable):
                    confdetails[modpath].append((modattr, name, results))

        # Collapse details into a modpath -> Details struct
        detaildict = collections.defaultdict(list)

        for modpath, details in confdetails.items():
            for detail in details:
                modattr, name, results = detail
                obj_path = '.'.join([modpath, modattr])
                if obj_path in obj_path_skips:
                    continue
                for rslt in results:
                    if not rslt:
                        continue
                    detaildict[obj_path].append(rslt)

        # Now make the RST proper like
        keys = list(detaildict.keys())
        keys.sort()
        for obj_path in keys:
            details = detaildict.get(obj_path, [])
            details.sort(key=lambda x: x[0])
            rst.addHead(name=obj_path, lvl=1)
            for detail in details:
                confvalu, confdict = detail[0], detail[1]
                rst.addHead(confvalu, lvl=2)
                _keys = list(confdict.keys())
                _keys.sort()
                for _key in _keys:
                    v = confdict.get(_key)
                    line = '  - {}: {}'.format(_key, v)
                    rst.addLines(line)

        outp.printf(rst.getRstText())
        if fd:
            fd.close()
        return 0
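
Examples #28 and #29 lean on an RstHelp helper whose interface can be inferred from the calls above: addHead(name, lvl=...), addLines(*lines) and getRstText(). The snippet below is only a usage sketch of that inferred interface, with placeholder content.

rst = RstHelp()
rst.addHead('Document Title', lvl=0)
rst.addHead('A Section', lvl=1)
rst.addLines('', 'Body text for the section.', '')
outp.printf(rst.getRstText())  # emit the assembled RST text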
Example #29
def main(argv, outp=None):

    if outp is None:
        outp = s_output.OutPut()

    pars = argparse.ArgumentParser(prog='autodoc', description=descr)

    #pars.add_argument('--format', default='rst')
    pars.add_argument('--cortex',
                      default='ram://',
                      help='Cortex URL for model inspection')
    pars.add_argument(
        '--doc-model',
        action='store_true',
        default=False,
        help='Generate RST docs for the DataModel within a cortex')
    pars.add_argument('--savefile',
                      default=None,
                      help='Save output to the given file')

    opts = pars.parse_args(argv)
    if opts.savefile:
        fd = open(opts.savefile, 'wb')
        outp = s_output.OutPutFd(fd)

    core = s_cortex.openurl(opts.cortex)

    if opts.doc_model:

        forms = []
        types = []

        props = collections.defaultdict(list)

        for tufo in core.getTufosByProp('syn:type'):
            name = tufo[1].get('syn:type')
            info = s_tufo.props(tufo)
            types.append((name, info))

        for tufo in core.getTufosByProp('syn:form'):
            name = tufo[1].get('syn:form')
            info = s_tufo.props(tufo)
            forms.append((name, info))

        for tufo in core.getTufosByProp('syn:prop'):
            prop = tufo[1].get('syn:prop')
            form = tufo[1].get('syn:prop:form')
            info = s_tufo.props(tufo)
            props[form].append((prop, info))

        types.sort()
        forms.sort()

        for v in props.values():
            v.sort()

        rst = RstHelp()
        rst.addHead('Synapse Data Model', lvl=0)

        rst.addHead('Types', lvl=1)

        for name, info in types:

            rst.addHead(name, lvl=2)
            inst = core.getTypeInst(name)

            ex = inst.get('ex')
            doc = inst.get('doc')

            if doc is not None:
                rst.addLines(doc)

            bases = core.getTypeBases(name)
            rst.addLines('', 'Type Hierarchy: %s' % (' -> '.join(bases), ), '')

            if ex is not None:

                #valu = core.getTypeParse(name,ex)
                #vrep = reprvalu(valu)

                rst.addLines('', 'Examples:', '')
                rst.addLines('- repr mode: %s' % (repr(ex), ))
                #rst.addLines('- system mode: %s' % (vrep,))
                rst.addLines('')

            cons = []
            xforms = []

            if core.isSubType(name, 'str'):

                regex = inst.get('regex')
                if regex is not None:
                    cons.append('- regex: %s' % (regex, ))

                lower = inst.get('lower')
                if lower:
                    xforms.append('- case: lower')

                restrip = inst.get('restrip')
                if restrip is not None:
                    xforms.append('- regex strip: %s' % (restrip, ))

                nullval = inst.get('nullval')
                if nullval is not None:
                    cons.append('- null value: %s' % (nullval, ))

            if core.isSubType(name, 'int'):

                minval = inst.get('min')
                if minval is not None:
                    cons.append('- min value: %d (0x%x)' % (minval, minval))

                maxval = inst.get('max')
                if maxval is not None:
                    cons.append('- max value: %d (0x%x)' % (maxval, maxval))

                ismin = inst.get('ismin')
                if ismin is not None:
                    xforms.append('- is minimum: True')

                ismax = inst.get('ismax')
                if ismax is not None:
                    xforms.append('- is maximum: True')

            if core.isSubType(name, 'sepr'):

                sep = inst.get('sep')
                fields = inst.get('fields')

                parts = []
                for part in fields.split('|'):
                    name, stype = part.split(',')
                    parts.append(stype)

                seprs = sep.join(['<%s>' % p for p in parts])
                rst.addLines('', 'Sepr Fields: %s' % (seprs, ))

            if cons:
                cons.append('')
                rst.addLines('', 'Type Constraints:', '', *cons)

            if xforms:
                xforms.append('')
                rst.addLines('', 'Type Transforms:', '', *xforms)

        rst.addHead('Forms', lvl=1)

        for name, info in forms:
            ftype = info.get('ptype', 'str')
            rst.addHead('%s = <%s>' % (name, ftype), lvl=2)

            doc = core.getPropInfo(name, 'doc')
            if doc is not None:
                rst.addLines(doc)

            rst.addLines('', 'Properties:', '')

            for prop, pnfo in props.get(name, ()):

                # use the resolver funcs that will recurse upward
                pex = core.getPropInfo(prop, 'ex')
                pdoc = core.getPropInfo(prop, 'doc')

                ptype = pnfo.get('ptype')

                pline = '\t- %s = <%s>' % (prop, ptype)

                defval = pnfo.get('defval')
                if defval is not None:
                    pline += ' (default: %r)' % (defval, )

                rst.addLines(pline)

                if pdoc:
                    rst.addLines('\t\t- %s' % (pdoc, ))

        outp.printf(rst.getRstText())
        return 0