Example #1
def main(argv, outp=None):

    if outp is None:  # pragma: no cover
        outp = s_output.OutPut()
    parser = makeargpaser()
    opts = parser.parse_args(argv)

    if not opts.verbose:
        logging.disable(logging.DEBUG)

    # Check to see if we're working with a savefile or a dumprows file
    decompress = False
    discard_first_event = False
    with open(opts.input, 'rb') as fd:
        gen = s_msgpack.iterfd(fd)
        tufo0 = next(gen)
        if tufo0[0] == 'syn:cortex:rowdump:info':
            outp.printf('Restoring from a dumprows file.')
            discard_first_event = True
            decompress = tufo0[1].get('rows:compress')
            if decompress:
                outp.printf('Gzip row compression enabled.')
        else:
            outp.printf('Restoring from a savefile')
        # The generator holds a dangling reference to fd; drop it now that we're done
        del gen

    storconf = {'rev:storage': False}
    if opts.revstorage:  # pragma: no cover
        storconf['rev:storage'] = True

    with open(opts.input, 'rb') as fd:
        gen = s_msgpack.iterfd(fd)
        if discard_first_event:
            next(gen)
        with s_cortex.openstore(opts.store, storconf=storconf) as store:
            outp.printf('Starting row level restore')
            tick = time.time()
            i = 0
            nrows = 0
            for event in gen:
                if decompress and 'rows' in event[1]:
                    event[1]['rows'] = s_msgpack.un(
                        gzip.decompress(event[1].get('rows')))
                i += 1
                if i % 250 == 0:
                    outp.printf('Loaded {} events'.format(i))
                store.loadbus.dist(event)
                _nrows = len(event[1].get('rows', ()))
                nrows += _nrows
                if _nrows and i % 10 == 0:
                    logger.debug('Loaded %s rows', nrows)

            tock = time.time()
            outp.printf('Done loading events - took {} seconds.'.format(tock - tick))
    outp.printf('Fin')
    return 0
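
The decompression branch above mirrors how a compressed dump is written. A minimal sketch of the round trip, assuming s_msgpack is synapse.lib.msgpack (which unpacks msgpack arrays back as tuples):

import gzip

import synapse.lib.msgpack as s_msgpack

# One storage row: the (iden, prop, valu, time) 4-tuple shape the dump
# tests below also assert via len(rows[0]) == 4.
rows = (('0123abcd', 'inet:ipv4', 0x01020304, 1234567890),)

# Dump side: msgpack-encode the rows, then gzip the packed bytes.
event = ('core:save:add:rows', {'rows': gzip.compress(s_msgpack.en(rows))})

# Restore side: the same two-step decode main() performs per event.
assert s_msgpack.un(gzip.decompress(event[1]['rows'])) == rows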
Example #2
    def test_msgpack_large_data(self):

        big_string = s_const.mebibyte * 129 * 'V'
        struct = ('test', {'key': big_string})

        buf = s_msgpack.en(struct)

        unpacked_struct = s_msgpack.un(buf)
        self.eq(struct, unpacked_struct)

        # Ensure our use of msgpack.Unpacker can also handle this data
        with self.getTestDir() as dirn:
            with s_common.genfile(dirn, 'test.mpk') as fd:
                fd.write(buf)
            with s_common.genfile(dirn, 'test.mpk') as fd:
                genr = s_msgpack.iterfd(fd)
                objs = list(genr)
                self.len(1, objs)
                self.eq(objs[0], struct)

        # Ensure that our streaming Unpk object can also handle this data
        unpk = s_msgpack.Unpk()
        objs = unpk.feed(buf)
        self.len(1, objs)
        self.eq(objs[0], (135266320, struct))
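
The offset in the final assertion is Unpk.feed() reporting bytes consumed: feed() yields (size, obj) pairs, and 129 MiB of payload is 129 * 1048576 = 135266304 bytes, plus 16 bytes of msgpack framing (array and map headers, the 'test' and 'key' fixstrs, and the 5-byte str32 header for the large value), giving 135266320.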
Example #3
    def test_msgpack_types(self):
        # This is a future-proofing test for msgpack to ensure that the
        # encoded byte form of basic types stays stable across versions.
        buf = b'\x92\xa4hehe\x85\xa3str\xa41234\xa3int\xcd\x04\xd2\xa5float\xcb@(\xae\x14z\xe1G\xae\xa3bin\xc4\x041234\xa9realworld\xac\xc7\x8b\xef\xbf\xbd\xed\xa1\x82\xef\xbf\xbd\x12'
        struct = (
            'hehe',
            {
                'str': '1234',
                'int': 1234,
                'float': 12.34,
                'bin': b'1234',
                'realworld': '\u01cb\ufffd\ud842\ufffd\u0012'
            }
        )
        unode = s_msgpack.un(buf)
        self.eq(unode, struct)

        # Ensure our use of msgpack.Unpacker can also handle this data
        with self.getTestDir() as dirn:
            with s_common.genfile(dirn, 'test.mpk') as fd:
                fd.write(buf)
            with s_common.genfile(dirn, 'test.mpk') as fd:
                genr = s_msgpack.iterfd(fd)
                objs = list(genr)
                self.len(1, objs)
                self.eq(objs[0], struct)

        # Ensure that our streaming Unpk object can also handle this data
        unpk = s_msgpack.Unpk()
        objs = unpk.feed(buf)
        self.len(1, objs)
        self.eq(objs[0], (71, struct))
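
Here again Unpk.feed() yields (size, obj) pairs; 71 is simply len(buf), the total number of bytes consumed while decoding the structure.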
Example #4
    def test_msgpack_surrogates(self):
        bads = '\u01cb\ufffd\ud842\ufffd\u0012'
        obyts = s_msgpack.en(bads)
        self.isinstance(obyts, bytes)

        outs = s_msgpack.un(obyts)
        self.eq(outs, bads)

        with self.getTestDir() as fdir:
            fd = s_common.genfile(fdir, 'test.mpk')
            fd.write(obyts)
            fd.close()

            fd = s_common.genfile(fdir, 'test.mpk')
            gen = s_msgpack.iterfd(fd)

            items = [obj for obj in gen]
            self.len(1, items)
            self.eq(items[0], bads)

            fd.close()

        unpk = s_msgpack.Unpk()
        ret = unpk.feed(obyts)
        self.len(1, ret)
        self.eq([(13, bads)], ret)
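
The 13 follows the same (bytes consumed, obj) convention: one fixstr header byte plus 12 bytes of UTF-8 payload, the lone surrogate \ud842 included, since synapse's msgpack helpers encode strings with surrogate passthrough.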
Example #5
    def checkTypes(self, enfunc):
        # This is a future-proofing test for msgpack to ensure that the
        # encoded byte form of basic types stays stable across versions.
        buf = b'\x92\xa4hehe\x85\xa3str\xa41234\xa3int\xcd\x04\xd2\xa5float\xcb@(\xae\x14z\xe1G\xae\xa3bin\xc4\x041234\xa9realworld\xac\xc7\x8b\xef\xbf\xbd\xed\xa1\x82\xef\xbf\xbd\x12'
        struct = (
            'hehe',
            {
                'str': '1234',
                'int': 1234,
                'float': 12.34,
                'bin': b'1234',
                'realworld': '\u01cb\ufffd\ud842\ufffd\u0012'
            }
        )
        unode = s_msgpack.un(buf)
        self.eq(unode, struct)

        # Ensure our use of msgpack.Unpacker can also handle this data
        with self.getTestDir() as dirn:
            with s_common.genfile(dirn, 'test.mpk') as fd:
                fd.write(buf)
            with s_common.genfile(dirn, 'test.mpk') as fd:
                genr = s_msgpack.iterfd(fd)
                objs = list(genr)
                self.len(1, objs)
                self.eq(objs[0], struct)

        # Ensure that our streaming Unpk object can also handle this data
        unpk = s_msgpack.Unpk()
        objs = unpk.feed(buf)
        self.len(1, objs)
        self.eq(objs[0], (71, struct))

        # Generic isok helper
        self.true(s_msgpack.isok(1))
        self.true(s_msgpack.isok('1'))
        self.true(s_msgpack.isok(1.1))
        self.true(s_msgpack.isok(b'1'))
        self.true(s_msgpack.isok(None))
        self.true(s_msgpack.isok(True))
        self.true(s_msgpack.isok(False))
        self.true(s_msgpack.isok([1]))
        self.true(s_msgpack.isok((1,)))
        self.true(s_msgpack.isok({1: 1}))
        # types that msgpack cannot pack
        self.false(s_msgpack.isok({1, 2}))  # set
        self.false(s_msgpack.isok(print))  # function

        buf2 = b'\x81\xc0\xcd\x04\xd2'
        struct2 = {
            None: 1234
        }
        ustruct2 = s_msgpack.un(buf2)
        self.eq(ustruct2, struct2)
        pbuf2 = enfunc(ustruct2)
        self.eq(buf2, pbuf2)
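
The buf2 bytes can be read by hand: \x81 is a one-entry fixmap, \xc0 is nil (the None key), and \xcd\x04\xd2 is a uint16 holding 0x04d2 = 1234.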
Example #6
def main(argv, outp=s_output.stdout):

    pars = argparse.ArgumentParser(prog='cryo.cat', description='display data items from a cryo cell')
    pars.add_argument('cryotank', help='The telepath URL for the remote cryotank.')
    pars.add_argument('--offset', default=0, type=int, help='Begin at offset index')
    pars.add_argument('--size', default=10, type=int, help='How many items to display')
    pars.add_argument('--omit-offset', default=False, action='store_true', help='Output raw items with no offsets.')
    # TODO: synapse.tools.cryo.list <cryocell>
    #pars.add_argument('--list', default=False, action='store_true', help='List tanks in the remote cell and return')
    group = pars.add_mutually_exclusive_group()
    group.add_argument('--jsonl', action='store_true', help='Input/Output items in jsonl format')
    group.add_argument('--msgpack', action='store_true', help='Input/Output items in msgpack format')
    pars.add_argument('--verbose', '-v', default=False, action='store_true', help='Verbose output')
    pars.add_argument('--ingest', '-i', default=False, action='store_true',
                      help='Reverses direction: feeds cryotank from stdin in msgpack or jsonl format')

    opts = pars.parse_args(argv)

    if opts.verbose:
        logger.setLevel(logging.INFO)

    if opts.ingest and not opts.jsonl and not opts.msgpack:
        outp.printf('Must specify exactly one of --jsonl or --msgpack if --ingest is specified')
        return 1

    logger.info(f'connecting to: {opts.cryotank}')

    with s_telepath.openurl(opts.cryotank) as tank:

        if opts.ingest:

            if opts.msgpack:
                items = list(s_msgpack.iterfd(sys.stdin.buffer))
                tank.puts(items)
                return 0

            items = [json.loads(l) for l in sys.stdin]
            tank.puts(items)
            return 0

        for item in tank.slice(opts.offset, opts.size):

            if opts.jsonl:
                outp.printf(json.dumps(item[1], sort_keys=True))

            elif opts.msgpack:
                sys.stdout.buffer.write(s_msgpack.en(item[1]))

            else:
                outp.printf(pprint.pformat(item))
    return 0
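
Since the tests elsewhere in this collection drive tools by calling main(argv, outp) directly, a hypothetical invocation of this one would look like the sketch below; the telepath URL is made up, and a live cryotank must be listening there for the call to succeed.

import synapse.lib.output as s_output

# Print five items from a (hypothetical) remote tank as JSON lines.
argv = ['tcp://cryo.host:4040/mytank', '--offset', '0', '--size', '5', '--jsonl']
ret = main(argv, outp=s_output.stdout)  # returns 0 on success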
Example #7
    def test_simple_use(self):
        self.thisHostMustNot(platform='darwin')
        outp = self.getTestOutp()
        with self.getTestDir() as temp:
            fp = os.path.join(temp, 'dumpfile.mpk')
            new_db = os.path.join(temp, 'test.db')
            sqlite_url = 'sqlite:///{}'.format(new_db)
            with s_cortex.openurl(sqlite_url) as core:
                self.true(core.isnew)
                core.setBlobValu('syn:test:tel', 8675309)
                with core.getCoreXact():
                    core.formTufoByProp('inet:ipv4', 0x01020304)
                    for i in range(1000):
                        core.formTufoByProp('inet:ipv4', i)

            # Now dump that sqlite core
            argv = ['-s', sqlite_url, '-o', fp]
            ret = s_dumprows.main(argv, outp)
            self.eq(ret, 0)

            # Now ensure our .mpk file is correct
            with open(fp, 'rb') as fd:
                gen = s_msgpack.iterfd(fd)
                evt = next(gen)
                self.eq(evt[0], 'syn:cortex:rowdump:info')
                self.eq(evt[1].get('rows:compress'), False)
                self.eq(evt[1].get('synapse:rows:output'), fp)
                self.eq(evt[1].get('synapse:cortex:input'), sqlite_url)
                self.eq(evt[1].get('synapse:cortex:blob_store'), False)
                self.eq(evt[1].get('synapse:cortex:revstore'), False)
                self.eq(evt[1].get('python:version'), version)
                self.isin('synapse:version', evt[1])
                evt = next(gen)
                self.eq(evt[0], 'core:save:add:rows')
                self.isin('rows', evt[1])
                rows = evt[1].get('rows')
                self.isinstance(rows, tuple)
                self.isinstance(rows[0], tuple)
                self.eq(len(rows[0]), 4)
                # Expensive but worth checking
                event_types = set()
                event_types.add(evt[0])
                total_rows = 0
                for evt in gen:
                    event_types.add(evt[0])
                    if 'rows' in evt[1]:
                        total_rows = total_rows + len(evt[1].get('rows'))
                self.gt(total_rows, 1000)
                self.eq(event_types, {'core:save:add:rows'})
Example #8
    def _setSaveFd(self, fd, load=True, fini=False):
        '''
        The default implementation of savefile for a Cortex.
        This may be overridden by a storage layer.
        '''
        if load:
            for mesg in s_msgpack.iterfd(fd):
                self.loadbus.dist(mesg)

        self.onfini(fd.flush)
        if fini:
            self.onfini(fd.close)

        def savemesg(mesg):
            fd.write(s_msgpack.en(mesg))

        self.savebus.link(savemesg)
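
The docstring describes a persist-and-replay scheme: each message distributed on the save bus is appended to the file, and loading just iterates the file back through iterfd. A minimal sketch of that idea against an in-memory buffer (the event names are illustrative, and s_msgpack is assumed to be synapse.lib.msgpack):

import io

import synapse.lib.msgpack as s_msgpack

fd = io.BytesIO()

# 'Save' side: what savemesg() does for each message on the save bus.
for mesg in (('core:save:add:rows', {'rows': ()}),
             ('core:save:del:rows', {})):
    fd.write(s_msgpack.en(mesg))

# 'Load' side: what _setSaveFd() does when load=True.
fd.seek(0)
assert len(list(s_msgpack.iterfd(fd))) == 2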
Example #9
    def test_msgpack_iterfd(self):
        t0 = ('5678', {'key': 1})
        t1 = ('1234', {'key': 'haha'})

        with self.getTestDir() as fdir:
            fd = s_common.genfile(fdir, 'test.mpk')
            for obj in (t0, t1):
                fd.write(s_msgpack.en(obj))
            fd.close()

            fd = s_common.genfile(fdir, 'test.mpk')
            gen = s_msgpack.iterfd(fd)

            items = [obj for obj in gen]
            self.len(2, items)
            self.sorteq(items, [t0, t1])

            fd.close()
Example #10
    def test_dump_largecore(self):
        self.skipLongTest()
        self.thisHostMustNot(platform='darwin')
        # This ensures we're exercising the "dump rows when we have N
        # bytes cached" codepath. Unfortunately this is a bit slow
        # (2-4 seconds).
        ntufos = 40000
        outp = self.getTestOutp()
        with self.getTestDir() as temp:
            fp = os.path.join(temp, 'dumpfile.mpk')
            new_db = os.path.join(temp, 'test.db')
            sqlite_url = 'sqlite:///{}'.format(new_db)
            with s_cortex.openurl(sqlite_url) as core:
                self.true(core.isnew)
                rows = []
                tick = now()
                for i in range(1, ntufos):
                    iden = guid()
                    rows.append((iden, 'tufo:form', 'inet:asn', tick))
                    rows.append((iden, 'inet:asn', i, tick))
                    rows.append((iden, 'inet:asn:name', '??', tick))
                core.addRows(rows)
                q = 'SELECT count(1) from {}'.format(
                    core.store._getTableName())
                num_core_rows = core.store.select(q)[0][0]

            # Now dump that sqlite core
            argv = ['-s', sqlite_url, '-o', fp]
            ret = s_dumprows.main(argv, outp)
            self.eq(ret, 0)

            stat = os.stat(fp)
            self.gt(stat.st_size, s_const.mebibyte * 4)

            # Now ensure our .mpk file is correct
            with open(fp, 'rb') as fd:
                msgpk_rows = 0
                for evt in s_msgpack.iterfd(fd):
                    if 'rows' in evt[1]:
                        msgpk_rows = msgpk_rows + len(evt[1].get('rows'))
            self.eq(num_core_rows, msgpk_rows)
Example #11
def convertSpliceFd(fpath):
    '''
    Converts an "old" splice log to the new format.

    Args:
        fpath (str): The path to the "old" splice log file.

    Example:
        convertSpliceFd('/stuff/oldsplicelog.mpk')

    Notes:
        This function reads an "old" splice log file, writes to a temporary
        file, and then overwrites the old file with the new data. This function
        only converts old splices to new splices. If any messages are invalid,
        an exception will be raised and the conversion will exit early and not
        overwrite any data.

    Returns:
        None
    '''
    with tempfile.SpooledTemporaryFile() as tmp:
        with open(fpath, 'r+b') as fd:

            for chnk in s_common.chunks(s_msgpack.iterfd(fd), 1000):
                for mesg in chnk:
                    newspl = convertOldSplice(mesg)
                    if newspl:
                        mesg = newspl[1]['mesg']
                    tmp.write(s_msgpack.en(mesg))

            tmp.seek(0)
            fd.seek(0)

            data = tmp.read(_readsz)
            while data:
                fd.write(data)
                data = tmp.read(_readsz)

            fd.truncate()
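
The spool-then-overwrite shape generalizes to any risky in-place file transform. A hedged sketch of the pattern on its own (the helper name and readsz parameter are mine, not synapse's; the _readsz above stays whatever the module defines):

import tempfile

def rewrite_inplace(fpath, xform, readsz=10 * 1024 * 1024):
    '''
    Stream xform(fd) into a spooled temp file, then copy it back over
    the original. If xform raises, the original file is left untouched.
    '''
    with tempfile.SpooledTemporaryFile() as tmp:
        with open(fpath, 'r+b') as fd:
            for byts in xform(fd):
                tmp.write(byts)

            tmp.seek(0)
            fd.seek(0)

            data = tmp.read(readsz)
            while data:
                fd.write(data)
                data = tmp.read(readsz)

            # Drop trailing bytes if the new content is shorter.
            fd.truncate()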
Example #12
    def test_simple_compress(self):
        self.thisHostMustNot(platform='darwin')
        outp = self.getTestOutp()
        with self.getTestDir() as temp:
            fp = os.path.join(temp, 'dumpfile.mpk')
            new_db = os.path.join(temp, 'test.db')
            sqlite_url = 'sqlite:///{}'.format(new_db)
            with s_cortex.openurl(sqlite_url) as core:
                self.true(core.isnew)
                core.setBlobValu('syn:test:tel', 8675309)
                core.formTufoByProp('inet:ipv4', 0x01020304)
            # Now dump that sqlite core
            argv = ['-s', sqlite_url, '-o', fp, '--compress']
            ret = s_dumprows.main(argv, outp)
            self.eq(ret, 0)

            # Now ensure our .mpk file is correct
            with open(fp, 'rb') as fd:
                gen = s_msgpack.iterfd(fd)
                evt = next(gen)
                self.eq(evt[0], 'syn:cortex:rowdump:info')
                self.eq(evt[1].get('rows:compress'), True)
                evt = next(gen)
                self.eq(evt[0], 'core:save:add:rows')
                self.isin('rows', evt[1])
                rows = evt[1].get('rows')
                # The rows blob is gzip-compressed; decompress and unpack it
                # separately here rather than in place
                rows = s_msgpack.un(gzip.decompress(rows))
                self.isinstance(rows, tuple)
                self.isinstance(rows[0], tuple)
                self.eq(len(rows[0]), 4)
                # Expensive but worth checking
                event_types = set()
                event_types.add(evt[0])
                for evt in gen:
                    event_types.add(evt[0])
                self.eq(event_types, {'core:save:add:rows'})
Example #13
    def test_blob_dump(self):
        outp = self.getTestOutp()
        with self.getTestDir() as temp:
            fp = os.path.join(temp, 'dumpfile.mpk')
            new_db = os.path.join(temp, 'test.db')
            sqlite_url = 'sqlite:///{}'.format(new_db)
            with s_cortex.openurl(sqlite_url) as core:
                self.true(core.isnew)
                core.setBlobValu('syn:test:tel', 8675309)
                core.formTufoByProp('inet:ipv4', 0x01020304)

            # Now dump that sqlite core
            argv = ['-s', sqlite_url, '-o', fp, '--dump-blobstore']
            ret = s_dumprows.main(argv, outp)
            self.eq(ret, 0)

            # Now ensure our .mpk file is correct
            with open(fp, 'rb') as fd:
                gen = s_msgpack.iterfd(fd)
                evt = next(gen)
                self.eq(evt[0], 'syn:cortex:rowdump:info')
                self.eq(evt[1].get('synapse:cortex:blob_store'), True)
                evt = next(gen)
                self.eq(evt[0], 'core:save:add:rows')
                self.isin('rows', evt[1])
                rows = evt[1].get('rows')
                self.isinstance(rows, tuple)
                self.isinstance(rows[0], tuple)
                self.eq(len(rows[0]), 4)
                # Expensive but worth checking
                event_types = set()
                event_types.add(evt[0])
                for evt in gen:
                    event_types.add(evt[0])
                self.eq(event_types,
                        {'core:save:add:rows', 'syn:core:blob:set'})
Example #14
def main(argv, outp=s_output.stdout):

    pars = argparse.ArgumentParser(
        prog='cryo.cat', description='display data items from a cryo cell')
    pars.add_argument('cryotank',
                      help='The telepath URL for the remote cryotank.')
    pars.add_argument('--offset',
                      default=0,
                      type=int,
                      help='Begin at offset index')
    pars.add_argument('--size',
                      default=10,
                      type=int,
                      help='How many items to display')
    # TODO: synapse.tools.cryo.list <cryocell>
    #pars.add_argument('--list', default=False, action='store_true', help='List tanks in the remote cell and return')
    group = pars.add_mutually_exclusive_group()
    group.add_argument('--jsonl',
                       action='store_true',
                       help='Input/Output items in jsonl format')
    group.add_argument('--msgpack',
                       action='store_true',
                       help='Input/Output items in msgpack format')
    pars.add_argument('--verbose',
                      '-v',
                      default=False,
                      action='store_true',
                      help='Verbose output')
    pars.add_argument('--ingest', '-i', default=False, action='store_true',
                      help='Reverses direction: feeds cryotank from stdin in msgpack or jsonl format')

    opts = pars.parse_args(argv)

    if opts.verbose:
        logger.setLevel(logging.INFO)

    if opts.ingest and not opts.jsonl and not opts.msgpack:
        logger.error(
            'Must specify exactly one of --jsonl or --msgpack if --ingest is specified'
        )
        return 1

    logger.info(f'connecting to: {opts.cryotank}')

    with s_telepath.openurl(opts.cryotank) as tank:

        try:

            typename = tank.getCellType()
            if typename != 'cryotank':
                outp.printf(f'error: remote object is a: {typename}')
                return 1

        except Exception:
            outp.printf('error: remote object is *not* a cell!')
            return 1

        if opts.ingest:

            if opts.msgpack:
                items = list(s_msgpack.iterfd(sys.stdin.buffer))
                tank.puts(items)
                return 0

            items = [json.loads(l) for l in sys.stdin]
            tank.puts(items)
            return 0

        for item in tank.slice(opts.offset, opts.size):

            if opts.jsonl:
                outp.printf(json.dumps(item[1], sort_keys=True))

            elif opts.msgpack:
                sys.stdout.buffer.write(s_msgpack.en(item[1]))

            else:
                outp.printf(pprint.pformat(item))

    return 0
Example #15
def _fmt_mpk(fd, info):
    yield from s_msgpack.iterfd(fd)
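
A format hook here is just a generator taking (fd, info). A hypothetical jsonl counterpart with the same signature, shown only to illustrate the shape (it is not part of synapse):

import json

def _fmt_jsonl(fd, info):
    # One JSON document per line instead of a msgpack stream.
    for line in fd:
        yield json.loads(line)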
Example #16
def main(argv, outp=s_output.stdout):

    pars = argparse.ArgumentParser(
        prog='cryo.cat', description='display data items from a cryo cell')
    pars.add_argument('cryocell',
                      help='The cell descriptor and cryo tank path (cell://<host:port>/<name>).')
    pars.add_argument('--list',
                      default=False,
                      action='store_true',
                      help='List tanks in the remote cell and return')
    pars.add_argument('--offset',
                      default=0,
                      type=int,
                      help='Begin at offset index')
    pars.add_argument('--size',
                      default=10,
                      type=int,
                      help='How many items to display')
    pars.add_argument('--timeout',
                      default=10,
                      type=int,
                      help='The network timeout setting')
    pars.add_argument('--authfile',
                      help='Path to your auth file for the remote cell')
    group = pars.add_mutually_exclusive_group()
    group.add_argument('--jsonl',
                       action='store_true',
                       help='Input/Output items in jsonl format')
    group.add_argument('--msgpack',
                       action='store_true',
                       help='Input/Output items in msgpack format')
    pars.add_argument('--verbose',
                      '-v',
                      default=False,
                      action='store_true',
                      help='Verbose output')
    pars.add_argument('--ingest', '-i', default=False, action='store_true',
                      help='Reverses direction: feeds cryotank from stdin in msgpack or jsonl format')
    pars.add_argument('--omit-offset', default=False, action='store_true',
                      help="Don't output offsets of objects. Recommended when jsonl/msgpack output is used.")

    opts = pars.parse_args(argv)

    if opts.verbose:
        logger.setLevel(logging.INFO)

    if not opts.authfile:
        logger.error(
            'Currently requires --authfile until neuron protocol is supported')
        return 1

    if opts.ingest and not opts.jsonl and not opts.msgpack:
        logger.error(
            'Must specify exactly one of --jsonl or --msgpack if --ingest is specified'
        )
        return 1

    authpath = s_common.genpath(opts.authfile)

    auth = s_msgpack.loadfile(authpath)

    netw, path = opts.cryocell[7:].split('/', 1)
    host, portstr = netw.split(':')

    addr = (host, int(portstr))
    logger.info('connecting to: %r', addr)

    cryo = s_cryotank.CryoUser(auth, addr, timeout=opts.timeout)

    if opts.list:
        for name, info in cryo.list(timeout=opts.timeout):
            outp.printf('%s: %r' % (name, info))

        return 0

    if opts.ingest:
        if opts.msgpack:
            fd = sys.stdin.buffer
            item_it = _except_wrap(s_msgpack.iterfd(fd),
                                   lambda x: 'Error parsing item %d' % x)
        else:
            fd = sys.stdin
            item_it = _except_wrap((json.loads(s) for s in fd),
                                   lambda x: 'Failure parsing line %d of input' % x)
        cryo.puts(path, item_it)
    else:
        for item in cryo.slice(path, opts.offset, opts.size, opts.timeout):
            i = item[1] if opts.omit_offset else item
            if opts.jsonl:
                outp.printf(json.dumps(i, sort_keys=True))
            elif opts.msgpack:
                sys.stdout.buffer.write(s_msgpack.en(i))
            else:
                outp.printf(pprint.pformat(i))

    return 0
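
_except_wrap is not defined in this excerpt; judging from its call sites, it wraps an iterator and reports the 1-based index of the item that failed to parse. One plausible sketch (an assumption, not synapse's actual implementation):

def _except_wrap(genr, msgfunc):
    genr = iter(genr)
    indx = 0
    while True:
        indx += 1
        try:
            item = next(genr)
        except StopIteration:
            return
        except Exception as e:
            # Chain the original parse failure onto an indexed message.
            raise Exception(msgfunc(indx)) from e
        yield item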