Example #1
async def main(argv, outp=None):

    if outp is None:
        outp = s_output.OutPut()

    pars = makeargparser()

    opts = pars.parse_args(argv)

    if opts.doc_model:

        if opts.cortex:
            async with await s_telepath.openurl(opts.cortex) as core:
                rsttypes, rstforms = await docModel(outp, core)

        else:
            async with s_cortex.getTempCortex() as core:
                rsttypes, rstforms = await docModel(outp, core)

        if opts.savedir:
            with open(s_common.genpath(opts.savedir, 'datamodel_types.rst'),
                      'wb') as fd:
                fd.write(rsttypes.getRstText().encode())
            with open(s_common.genpath(opts.savedir, 'datamodel_forms.rst'),
                      'wb') as fd:
                fd.write(rstforms.getRstText().encode())

    return 0
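
Each of these tool entrypoints takes argv explicitly and returns an integer status, which makes them easy to test and to wire up as a console script. A minimal sketch of that wiring, assuming the example module above (the __main__ guard itself is not part of the excerpt):

import sys
import asyncio

if __name__ == '__main__':
    # Run the async main() above and use its integer return
    # value as the process exit status.
    sys.exit(asyncio.run(main(sys.argv[1:])))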
Example #2
async def main(argv, outp=None):

    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    pars = makeargparser()
    opts = pars.parse_args(argv)

    if opts.offset:
        if len(opts.files) > 1:
            outp.printf('Cannot start from an arbitrary offset for more than 1 file.')
            return 1

        outp.printf(f'Starting from offset [{opts.offset}] - it may take a while'
                    f' to get to that location in the input file.')

    if opts.test:
        async with s_cortex.getTempCortex(mods=opts.modules) as prox:
            await addFeedData(prox, outp, opts.format, opts.debug,
                        *opts.files,
                        chunksize=opts.chunksize,
                        offset=opts.offset)

    elif opts.cortex:
        async with await s_telepath.openurl(opts.cortex) as core:
            await addFeedData(core, outp, opts.format, opts.debug,
                        *opts.files,
                        chunksize=opts.chunksize,
                        offset=opts.offset)

    else:  # pragma: no cover
        outp.printf(f'No valid options provided [{opts}]')
        return 1

    return 0
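
The offset warning exists because a streaming input cannot be seeked by record: the tool has to read and discard rows until it reaches the requested position. A minimal sketch of that skip-then-chunk pattern (iter_chunks is an illustrative name, not Synapse's addFeedData):

import itertools

def iter_chunks(items, chunksize=1000, offset=0):
    # Discarding the first 'offset' items is O(offset) for a streaming
    # source, which is why the tool warns it may take a while.
    it = itertools.islice(iter(items), offset, None)
    while True:
        chunk = list(itertools.islice(it, chunksize))
        if not chunk:
            return
        yield chunk

For example, iter_chunks(range(10), chunksize=3, offset=4) yields [4, 5, 6] and then [7, 8, 9].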
Example #3
async def main(argv, outp=None):
    if outp is None:
        outp = s_output.OutPut()

    pars = makeargparser()

    opts = pars.parse_args(argv)

    if opts.doc_model:

        if opts.cortex:
            async with await s_telepath.openurl(opts.cortex) as core:
                rsttypes, rstforms = await docModel(outp, core)

        else:
            async with s_cortex.getTempCortex() as core:
                rsttypes, rstforms = await docModel(outp, core)

        if opts.savedir:
            with open(s_common.genpath(opts.savedir, 'datamodel_types.rst'), 'wb') as fd:
                fd.write(rsttypes.getRstText().encode())
            with open(s_common.genpath(opts.savedir, 'datamodel_forms.rst'), 'wb') as fd:
                fd.write(rstforms.getRstText().encode())

    if opts.doc_conf:
        confdocs, cname = await docConfdefs(opts.doc_conf,
                                            reflink=opts.doc_conf_reflink,
                                            doc_title=opts.doc_conf_title,
                                            )

        if opts.savedir:
            with open(s_common.genpath(opts.savedir, f'conf_{cname.lower()}.rst'), 'wb') as fd:
                fd.write(confdocs.getRstText().encode())

    if opts.doc_storm:
        confdocs, svcname = await docStormsvc(opts.doc_storm)

        if opts.savedir:
            with open(s_common.genpath(opts.savedir, f'stormsvc_{svcname.lower()}.rst'), 'wb') as fd:
                fd.write(confdocs.getRstText().encode())

    if opts.doc_stormpkg:
        pkgdocs, pkgname = await docStormpkg(opts.doc_stormpkg)

        if opts.savedir:
            with open(s_common.genpath(opts.savedir, f'stormpkg_{pkgname.lower()}.rst'), 'wb') as fd:
                fd.write(pkgdocs.getRstText().encode())

    if opts.doc_stormtypes:
        libdocs, typedocs = await docStormTypes()
        if opts.savedir:
            with open(s_common.genpath(opts.savedir, 'stormtypes_libs.rst'), 'wb') as fd:
                fd.write(libdocs.getRstText().encode())
            with open(s_common.genpath(opts.savedir, 'stormtypes_prims.rst'), 'wb') as fd:
                fd.write(typedocs.getRstText().encode())

    return 0
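
Every branch above repeats the same open/encode/write sequence. A small helper would remove that duplication; this sketch only reuses calls already visible in the example (s_common.genpath and the documents' getRstText), while save_rst itself is a hypothetical name:

def save_rst(savedir, name, rstdoc):
    # Write one generated RST document under savedir.
    with open(s_common.genpath(savedir, name), 'wb') as fd:
        fd.write(rstdoc.getRstText().encode())

With it, each branch collapses to calls like save_rst(opts.savedir, 'datamodel_types.rst', rsttypes).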
Example #4
async def main(argv, outp=None):

    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    pars = makeargparser()
    opts = pars.parse_args(argv)

    if opts.offset:
        if len(opts.files) > 1:
            outp.printf(
                'Cannot start from an arbitrary offset for more than 1 file.')
            return 1

        outp.printf(
            f'Starting from offset [{opts.offset}] - it may take a while'
            f' to get to that location in the input file.')

    if opts.test:
        async with s_cortex.getTempCortex(mods=opts.modules) as prox:
            await addFeedData(prox,
                              outp,
                              opts.format,
                              opts.debug,
                              *opts.files,
                              chunksize=opts.chunksize,
                              offset=opts.offset)

    elif opts.cortex:
        async with await s_telepath.openurl(opts.cortex) as core:
            try:
                s_version.reqVersion(core._getSynVers(), reqver)
            except s_exc.BadVersion as e:
                valu = s_version.fmtVersion(*e.get('valu'))
                outp.printf(
                    f'Cortex version {valu} is outside of the feed tool supported range ({reqver}).'
                )
                outp.printf(
                    f'Please use a version of Synapse which supports {valu}; '
                    f'current version is {s_version.verstring}.')
                return 1
            await addFeedData(core,
                              outp,
                              opts.format,
                              opts.debug,
                              *opts.files,
                              chunksize=opts.chunksize,
                              offset=opts.offset)

    else:  # pragma: no cover
        outp.printf(f'No valid options provided [{opts}]')
        return 1

    return 0
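
Example #4 extends Example #2 by gating the remote Cortex behind s_version.reqVersion, which raises s_exc.BadVersion when the server version falls outside the tool's supported range. A minimal stand-in for that kind of gate (check_min_version is illustrative and much simpler than the real range check):

def check_min_version(valu, minver):
    # Compare (major, minor, patch) tuples and fail fast on old servers.
    if tuple(valu) < tuple(minver):
        raise RuntimeError(f'server version {valu} is older than required {minver}')

check_min_version((2, 1, 0), (2, 0, 0)) passes silently, while check_min_version((1, 9, 9), (2, 0, 0)) raises.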
Example #5
def main(argv, outp=None):

    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()

    pars = makeargparser()
    opts = pars.parse_args(argv)

    if opts.offset:
        if len(opts.files) > 1:
            outp.printf(
                'Cannot start from an arbitrary offset for more than 1 file.')
            return 1

        outp.printf(
            f'Starting from offset [{opts.offset}] - it may take a while'
            f' to get to that location in the input file.')

    if opts.test:
        with s_cortex.getTempCortex(opts.modules) as prox:
            addFeedData(prox,
                        outp,
                        opts.format,
                        opts.debug,
                        *opts.files,
                        chunksize=opts.chunksize,
                        offset=opts.offset)

    elif opts.cortex:
        with s_telepath.openurl(opts.cortex) as core:
            addFeedData(core,
                        outp,
                        opts.format,
                        opts.debug,
                        *opts.files,
                        chunksize=opts.chunksize,
                        offset=opts.offset)

    else:  # pragma: no cover
        outp.printf(f'No valid options provided [{opts}]')
        return 1

    return 0
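
Example #5 is the synchronous variant of the feed tool, so its entrypoint needs no event loop. The hypothetical wiring is correspondingly simpler:

import sys

if __name__ == '__main__':
    # main() is a plain function here, so no asyncio.run() is needed.
    sys.exit(main(sys.argv[1:]))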
Example #6
async def main(argv, outp=s_output.stdout):

    pars = makeargparser()

    try:
        opts = pars.parse_args(argv)
    except s_exc.ParserExit as e:
        return e.get('status')

    with open(opts.stormfile, 'r', encoding='utf8') as fd:
        text = fd.read()

    def iterrows():
        for path in opts.csvfiles:

            with open(path, 'r', encoding='utf8') as fd:

                if opts.csv_header:
                    fd.readline()

                def genr():

                    for row in csv.reader(fd):
                        yield row

                for rows in s_common.chunks(genr(), 1000):
                    yield rows

    rowgenr = iterrows()

    logfd = None
    if opts.logfile is not None:
        logfd = s_common.genfile(opts.logfile)

    async def addCsvData(core):

        newcount, nodecount = 0, 0

        for rows in rowgenr:

            stormopts = {
                'vars': {'rows': rows},
            }

            async for mesg in core.storm(text, opts=stormopts):

                if mesg[0] == 'node:add':
                    newcount += 1

                elif mesg[0] == 'node':
                    nodecount += 1

                elif mesg[0] == 'err' and not opts.debug:
                    outp.printf(repr(mesg))

                if opts.debug:
                    outp.printf(repr(mesg))

                if logfd is not None:
                    byts = json.dumps(mesg).encode('utf8')
                    logfd.write(byts + b'\n')

        if opts.cli:
            await s_cmdr.runItemCmdr(core, outp)

        return newcount, nodecount

    if opts.test:
        async with s_cortex.getTempCortex() as core:
            newcount, nodecount = await addCsvData(core)

    else:
        async with await s_telepath.openurl(opts.cortex) as core:
            newcount, nodecount = await addCsvData(core)

    if logfd is not None:
        logfd.close()

    outp.printf('%d nodes (%d created).' % (nodecount, newcount))

    return 0
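
When --logfile is given, every storm message is appended to the log as one JSON document per line. Reading such a log back is straightforward; iter_log is an illustrative name:

import json

def iter_log(path):
    # Yield each storm message from a newline-delimited JSON log.
    with open(path, 'rb') as fd:
        for line in fd:
            line = line.strip()
            if line:
                yield json.loads(line)

For instance, sum(1 for mesg in iter_log('csv.log') if mesg[0] == 'node:add') counts the nodes created during a run.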
Example #7
async def main(argv, outp=s_output.stdout):

    pars = makeargparser()

    try:
        opts = pars.parse_args(argv)
    except s_exc.ParserExit as e:
        return e.get('status')

    with open(opts.stormfile, 'r', encoding='utf8') as fd:
        text = fd.read()

    if opts.export:

        if not opts.cortex:
            outp.printf('--export requires --cortex')
            return -1

        if len(opts.csvfiles) != 1:
            outp.printf('--export requires exactly 1 csvfile')
            return -1

        path = s_common.genpath(opts.csvfiles[0])
        outp.printf(f'Exporting CSV rows to: {path}')

        async with await s_telepath.openurl(opts.cortex) as core:

            with open(path, 'w', newline='') as fd:  # newline='' keeps csv.writer from emitting blank rows on Windows

                wcsv = csv.writer(fd)

                # prevent streaming nodes by limiting shown events
                showopts = {'show': ('csv:row', 'print', 'warn', 'err')}

                count = 0
                async for name, info in core.storm(text, opts=showopts):

                    if name == 'csv:row':
                        count += 1
                        wcsv.writerow(info['row'])
                        continue

                    if name in ('init', 'fini'):
                        continue

                    outp.printf('%s: %r' % (name, info))

                outp.printf(f'exported {count} csv rows.')

        return 0

    def iterrows():
        for path in opts.csvfiles:

            with open(path, 'r', encoding='utf8') as fd:

                if opts.csv_header:
                    fd.readline()

                def genr():

                    for row in csv.reader(fd):
                        yield row

                for rows in s_common.chunks(genr(), 1000):
                    yield rows

    rowgenr = iterrows()

    logfd = None
    if opts.logfile is not None:
        logfd = s_common.genfile(opts.logfile)
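        # Seek to the end so repeated runs append to an existing log
        # instead of overwriting it.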
        logfd.seek(0, 2)

    async def addCsvData(core):

        newcount, nodecount = 0, 0

        for rows in rowgenr:

            stormopts = {
                'vars': {
                    'rows': rows
                },
            }

            async for mesg in core.storm(text, opts=stormopts):

                if mesg[0] == 'node:add':
                    newcount += 1

                elif mesg[0] == 'node':
                    nodecount += 1

                elif mesg[0] == 'err' and not opts.debug:
                    outp.printf(repr(mesg))

                elif mesg[0] == 'print':
                    outp.printf(mesg[1].get('mesg'))

                if opts.debug:
                    outp.printf(repr(mesg))

                if logfd is not None:
                    byts = json.dumps(mesg).encode('utf8')
                    logfd.write(byts + b'\n')

        if opts.cli:
            await s_cmdr.runItemCmdr(core, outp, True)

        return newcount, nodecount

    if opts.test:
        async with s_cortex.getTempCortex() as core:
            newcount, nodecount = await addCsvData(core)

    else:
        async with await s_telepath.openurl(opts.cortex) as core:
            newcount, nodecount = await addCsvData(core)

    if logfd is not None:
        logfd.close()

    outp.printf('%d nodes (%d created).' % (nodecount, newcount))

    return 0
Example #8
def main(argv, outp=s_output.stdout):
    pars = makeargparser()
    try:
        opts = pars.parse_args(argv)
    except s_exc.ParserExit as e:
        return e.get('status')

    with open(opts.stormfile, 'r', encoding='utf8') as fd:
        text = fd.read()

    def iterrows():
        for path in opts.csvfiles:

            with open(path, 'r', encoding='utf8') as fd:

                if opts.csv_header:
                    fd.readline()

                def genr():

                    for row in csv.reader(fd):
                        yield row

                for rows in s_common.chunks(genr(), 1000):
                    yield rows

    rowgenr = iterrows()

    logfd = None
    if opts.logfile is not None:
        logfd = s_common.genfile(opts.logfile)

    def addCsvData(core):
        newcount, nodecount = 0, 0
        for rows in rowgenr:

            stormopts = {
                'vars': {'rows': rows},
            }

            for mesg in core.storm(text, opts=stormopts):

                if mesg[0] == 'node:add':
                    newcount += 1

                elif mesg[0] == 'node':
                    nodecount += 1

                elif mesg[0] == 'err' and not opts.debug:
                    outp.printf(repr(mesg))

                if opts.debug:
                    outp.printf(repr(mesg))

                if logfd is not None:
                    byts = json.dumps(mesg).encode('utf8')
                    logfd.write(byts + b'\n')

        if opts.cli:
            s_cmdr.runItemCmdr(core, outp)

        return newcount, nodecount

    if opts.test:
        with s_cortex.getTempCortex() as core:
            newcount, nodecount = addCsvData(core)

    else:
        with s_telepath.openurl(opts.cortex) as core:
            newcount, nodecount = addCsvData(core)

    if logfd is not None:
        logfd.close()

    outp.printf('%d nodes (%d created).' % (nodecount, newcount))

    return 0