Ejemplo n.º 1
0
def picarus_loader(prefix, dataset, email, picarus_server, api_key=None, login_key=None, otp=None, download=False, test=False, verbose=False):
    """Upload every image of a named dataset to a Picarus server.

    Args:
        prefix: Row-key prefix; the dataset split name is appended to it.
        dataset: Key into the module-level DATASETS registry.
        email: Picarus account email.
        picarus_server: Server URL passed to PicarusClient.
        api_key: Pre-existing API key (optional if login_key/otp given).
        login_key: Login key used together with `otp` to mint an API key.
        otp: Yubikey one-time password; when set, exchanges it for an API key.
        download: When True, fetch the dataset locally first.
        test: When True, read each row back, verify it round-trips, then delete it.
        verbose: When True, print each row key and image payload size.

    Raises:
        ValueError: If no api_key is available after the otp exchange.
    """
    import hadoopy_hbase
    import picarus
    dataset = DATASETS[dataset]()
    if download:
        dataset.download()
    if otp:
        # Exchange the one-time password for a fresh API key.
        api_key = picarus.PicarusClient(email=email, login_key=login_key, server=picarus_server).auth_yubikey(otp)['apiKey']
    if api_key is None:
        raise ValueError('api_key or login_key/otp must be set!')
    client = picarus.PicarusClient(email=email, api_key=api_key, server=picarus_server, max_attempts=10)
    for split, name, columns in dataset.images():
        row = hadoopy_hbase.hash_key(name, prefix=prefix + split, suffix=name, hash_bytes=4)
        if verbose:
            # Fix: %r already applies repr(); the original passed repr(row),
            # which printed a doubly-quoted/escaped key.
            print('row[%r] len(data:image)[%d]' % (row, len(columns.get('data:image', ''))))
        client.patch_row(TABLE, row, columns)
        if test:
            remote_columns = client.get_row(TABLE, row)
            if remote_columns != columns:
                # Dump both sides (values and their lengths) before failing.
                print(remote_columns)
                print(columns)
                print({x: len(y) for x, y in remote_columns.items()})
                print({x: len(y) for x, y in columns.items()})
                assert remote_columns == columns
            client.delete_row(TABLE, row)
Ejemplo n.º 2
0
def main(email,
         table,
         prefix,
         path,
         picarus_server,
         api_key=None,
         login_key=None,
         otp=None,
         start_row=''):
    """Upload rows stored on disk (one directory per row) to a Picarus table.

    The directory layout is: path/<ub64(row)>/<ub64(column)> where each
    column file's bytes become the cell value.

    Args:
        email: Picarus account email.
        table: Destination table name.
        prefix: Prepended to every decoded row key.
        path: Directory containing one subdirectory per row.
        picarus_server: Server URL passed to PicarusClient.
        api_key: Pre-existing API key (optional if login_key/otp given).
        login_key: Login key used together with `otp` to mint an API key.
        otp: Yubikey one-time password.
        start_row: urlsafe-b64 row key; rows sorting before it are skipped
            (resume support).

    Raises:
        ValueError: If no api_key is available after the otp exchange.
    """
    path = os.path.abspath(path)
    if otp:
        # Exchange the one-time password for a fresh API key.
        api_key = picarus.PicarusClient(
            email=email, login_key=login_key,
            server=picarus_server).auth_yubikey(otp)['apiKey']
    if api_key is None:
        raise ValueError('api_key or login_key/otp must be set!')
    if start_row:
        start_row = base64.urlsafe_b64decode(start_row)
    client = picarus.PicarusClient(email=email,
                                   api_key=api_key,
                                   server=picarus_server)
    for row_path in sorted(glob.glob(path + '/*')):
        row = prefix + base64.urlsafe_b64decode(os.path.basename(row_path))
        if row < start_row:
            continue
        columns = {}
        for column_path in glob.glob(row_path + '/*'):
            column = base64.urlsafe_b64decode(os.path.basename(column_path))
            # Fix: close the file deterministically instead of leaking the
            # handle via open(...).read().
            with open(column_path, 'rb') as column_file:
                columns[column] = column_file.read()
        print('Sending [%r] ub64:[%s] to Picarus' %
              (row, base64.urlsafe_b64encode(row)))
        client.patch_row(table, row, columns)
Ejemplo n.º 3
0
    def test_docs(self):
        """Execute every code snippet extracted from ../doc/*.rst as a smoke test.

        Credentials come from the EMAIL / LOGIN_KEY / SERVER environment
        variables; API_KEY and OTP are read from the environment when present,
        otherwise prompted for interactively (raw_input — Python 2 only).
        """
        import glob
        import os
        import picarus
        email = os.environ['EMAIL']
        login_key = os.environ['LOGIN_KEY']
        server = os.environ['SERVER']
        if 'API_KEY' not in os.environ:
            # No cached key: prompt for a Yubikey OTP and exchange it.
            otp = raw_input('Yubikey OTP: ')
            api_key = picarus.PicarusClient(
                server=server, email=email,
                login_key=login_key).auth_yubikey(otp)['apiKey']
        else:
            api_key = os.environ['API_KEY']
        # Every snippet is executed with this line prepended.
        prefix = ['import picarus']

        def test_passed():
            # Green "Test Passed" via ANSI escape codes.
            print('\033[92mTest Passed\033[0m')

        def test_failed():
            # Red "Test Failed" via ANSI escape codes.
            print('\033[91mTest Failed\033[0m')

        # A second OTP is collected for snippets that need one at run time.
        if 'OTP' not in os.environ:
            otp = raw_input('Yubikey OTP: ')
        else:
            otp = os.environ['OTP']
        for doc_fn in glob.glob('../doc/*.rst'):
            # parse_tests (defined elsewhere) yields snippets as lists of lines.
            for source in parse_tests(doc_fn):
                source = '\n'.join(prefix + source)
                print('Test from file [%s]' % doc_fn)
                print(source)
                # NOTE(review): exec of doc-sourced code — acceptable only
                # because the docs are trusted local files; never feed
                # untrusted input through this path.
                exec(compile(source, 'blah.py', 'exec'), {}, {
                    'email': email,
                    'login_key': login_key,
                    'api_key': api_key,
                    'server': server,
                    'otp': otp
                })
Ejemplo n.º 4
0
def _picarus_data(email, api_key, **kw):
    """Build and return a Picarus client; extra keyword args are ignored."""
    client = picarus.PicarusClient(email=email, api_key=api_key)
    return client
Ejemplo n.º 5
0
def picarus_store(email, api_key, prefix, **kw):
    """Patch every row yielded by load_dir(**kw) into the 'images' table.

    Each row key is prefixed with `prefix` before upload.
    """
    import picarus
    uploader = picarus.PicarusClient(email=email, api_key=api_key)
    for row_key, row_columns in load_dir(**kw):
        uploader.patch_row('images', prefix + row_key, row_columns)
Ejemplo n.º 6
0
                row_start, row_stop = get_row_bounds(EVENT_ROW_TIMES[event],
                                                     s['start'], s['stop'])
                times = EVENT_ROW_TIMES[event][row_start:row_stop]
                for _, _, cn in classify_slice(
                        EVENT_ROWS[event][row_start:row_stop], ROW_COLUMNS,
                        times[0], times[-1]):
                    # TODO: Need to fix for other types
                    c = CLASSES['locomotion'][cn['locomotion']]
                    print((class_name, c))
                    cm[class_name][c] += 1
            print(cm)


if __name__ == "__main__":
    # CLI entry point: load a pickled model, connect to Picarus, and run
    # extract_data() (defined elsewhere in this file).
    parser = argparse.ArgumentParser()
    parser.add_argument('model')
    parser.add_argument('email')
    parser.add_argument('api_key')
    parser.add_argument('--port',
                        help='Run on this port (default 8080)',
                        default='8080')

    ARGS = parser.parse_args()
    CLIENT = picarus.PicarusClient(email=ARGS.email, api_key=ARGS.api_key)
    # Fix: pickle data must be read in binary mode, and the bare
    # pickle.load(open(...)) leaked the file handle.
    with open(ARGS.model, 'rb') as model_file:
        data_type, EVENT_ROWS, ROW_COLUMNS = pickle.load(model_file)
    # Per event, the float timestamp of each row (parallel to EVENT_ROWS[e]).
    EVENT_ROW_TIMES = {
        e: [float(ROW_COLUMNS[row]['meta:time']) for row in rows]
        for e, rows in EVENT_ROWS.items()
    }
    extract_data()