Example #1
0
def test_indexing_workbook(testapp, indexer_testapp):
    """Exercise incremental indexing, full workbook indexing, and the
    two secondary (followup) indexers, then sanity-check search results."""
    # Seed exactly one object so the first /index cycle is incremental
    # and indexes precisely one document.
    testapp.post_json('/testing-post-put-patch/', {'required': ''})
    response = indexer_testapp.post_json('/index', {'record': True})
    assert response.json['indexed'] == 1

    from encoded.loadxl import load_all
    from pkg_resources import resource_filename

    # Load the full test workbook, then run a full (non-incremental) cycle.
    insert_dir = resource_filename('encoded', 'tests/data/inserts/')
    doc_dirs = [resource_filename('encoded', 'tests/data/documents/')]
    load_all(testapp, insert_dir, doc_dirs)
    response = indexer_testapp.post_json(
        '/index', {'record': True, 'is_testing_full': True})
    assert response.json['updated']
    assert response.json['indexed']
    ### OPTIONAL: audit via 2-pass is coming...
    #assert response.json['pass2_took']
    ### OPTIONAL: audit via 2-pass is coming...

    # NOTE: Both vis and region indexers are "followup" or secondary indexers
    #       and must be staged by the primary indexer run above.
    for endpoint, indexer_title in (('/index_vis', 'vis_indexer'),
                                    ('/index_region', 'region_indexer')):
        response = indexer_testapp.post_json(endpoint, {'record': True})
        assert response.json['cycle_took']
        assert response.json['title'] == indexer_title
    # The region indexer (last in the loop) must have indexed something.
    assert response.json['indexed'] > 0

    response = testapp.get('/search/?type=Biosample')
    assert response.json['total'] > 5
Example #2
0
def index_workbook(request, app):
    """Fixture: wipe the database, load the full test workbook, and run a
    full indexing pass before yielding to the test."""
    from snovault import DBSESSION
    # Check out a connection, detach it from the pool, and switch the raw
    # DBAPI connection to autocommit so TRUNCATE runs outside any test
    # transaction.
    pooled = app.registry[DBSESSION].bind.pool.unique_connection()
    pooled.detach()
    raw_conn = pooled.connection
    raw_conn.autocommit = True
    cur = raw_conn.cursor()
    cur.execute("""TRUNCATE resources, transactions CASCADE;""")
    cur.close()

    from webtest import TestApp
    log_level = request.config.getoption("--log")
    testapp = TestApp(app, {
        'HTTP_ACCEPT': 'application/json',
        'REMOTE_USER': '******',
    })

    from encoded.loadxl import load_all
    from pkg_resources import resource_filename
    insert_dir = resource_filename('encoded', 'tests/data/inserts/')
    doc_dirs = [resource_filename('encoded', 'tests/data/documents/')]
    load_all(testapp, insert_dir, doc_dirs, log_level=log_level)

    # Full (non-incremental) indexing run over the freshly loaded data.
    testapp.post_json('/index', {'is_testing_full': True})
    yield
Example #3
0
def workbook(conn, app, app_settings):
    """Fixture: load the test workbook inside a nested transaction and
    roll everything back when the test is done."""
    savepoint = conn.begin_nested()
    try:
        from webtest import TestApp
        from encoded.loadxl import load_all
        from pkg_resources import resource_filename

        testapp = TestApp(app, {
            'HTTP_ACCEPT': 'application/json',
            'REMOTE_USER': '******',
        })

        load_all(
            testapp,
            resource_filename('encoded', 'tests/data/inserts/'),
            [resource_filename('encoded', 'tests/data/documents/')],
        )

        yield
    finally:
        # Undo every insert regardless of test outcome.
        savepoint.rollback()
Example #4
0
def main():
    """Run development postgres + elasticsearch servers, optionally
    initializing the database mapping and loading the test data set.

    BUG FIX: `app` was previously created only under `--init`, so running
    with `--load` but without `--init` raised NameError at TestApp creation.
    The app is now created whenever either flag requires it.
    """
    import argparse
    parser = argparse.ArgumentParser(
        description="Run development servers", epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument('--app-name', help="Pyramid app name in configfile")
    parser.add_argument('config_uri', help="path to configfile")
    parser.add_argument('--clear', action="store_true", help="Clear existing data")
    parser.add_argument('--init', action="store_true", help="Init database")
    parser.add_argument('--load', action="store_true", help="Load test set")
    parser.add_argument('--datadir', default='/tmp/encoded', help="path to datadir")
    args = parser.parse_args()

    logging.basicConfig()
    # Loading app will have configured from config file. Reconfigure here:
    logging.getLogger('encoded').setLevel(logging.DEBUG)

    from encoded.tests import elasticsearch_fixture, postgresql_fixture
    from contentbase.elasticsearch import create_mapping
    datadir = os.path.abspath(args.datadir)
    pgdata = os.path.join(datadir, 'pgdata')
    esdata = os.path.join(datadir, 'esdata')
    if args.clear:
        for dirname in [pgdata, esdata]:
            if os.path.exists(dirname):
                shutil.rmtree(dirname)
    if args.init:
        postgresql_fixture.initdb(pgdata, echo=True)

    postgres = postgresql_fixture.server_process(pgdata, echo=True)
    elasticsearch = elasticsearch_fixture.server_process(esdata, echo=True)
    processes = [postgres, elasticsearch]

    @atexit.register
    def cleanup_process():
        # Terminate children, drain any remaining output, then reap them.
        for process in processes:
            if process.poll() is None:
                process.terminate()
        for process in processes:
            try:
                for line in process.stdout:
                    sys.stdout.write(line.decode('utf-8'))
            except IOError:
                pass
            process.wait()

    # Create the app once if any step below needs it (was: only under
    # --init, which made `--load` alone crash with NameError).
    if args.init or args.load:
        app = get_app(args.config_uri, args.app_name)

    if args.init:
        create_mapping.run(app)

    if args.load:
        from webtest import TestApp
        environ = {
            'HTTP_ACCEPT': 'application/json',
            'REMOTE_USER': '******',
        }
        testapp = TestApp(app, environ)

        from encoded.loadxl import load_all
        from pkg_resources import resource_filename
        inserts = resource_filename('encoded', 'tests/data/inserts/')
        docsdir = [resource_filename('encoded', 'tests/data/documents/')]
        load_all(testapp, inserts, docsdir)

    print('Started. ^C to exit.')

    stdouts = [p.stdout for p in processes]

    # Ugly should probably use threads instead
    while True:
        readable, writable, err = select.select(stdouts, [], stdouts, 5)
        for stdout in readable:
            for line in iter(stdout.readline, b''):
                sys.stdout.write(line.decode('utf-8'))
        if err:
            for stdout in err:
                for line in iter(stdout.readline, b''):
                    sys.stdout.write(line.decode('utf-8'))
            break
Example #5
0
def main():
    """Run development postgres + elasticsearch servers, optionally
    initializing the database mapping and loading the test data set.

    BUG FIX: `app` was previously created only under `--init`, so running
    with `--load` but without `--init` raised NameError at TestApp creation.
    The app is now created whenever either flag requires it.
    """
    import argparse
    parser = argparse.ArgumentParser(
        description="Run development servers", epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument('--app-name', help="Pyramid app name in configfile")
    parser.add_argument('config_uri', help="path to configfile")
    parser.add_argument('--clear', action="store_true", help="Clear existing data")
    parser.add_argument('--init', action="store_true", help="Init database")
    parser.add_argument('--load', action="store_true", help="Load test set")
    parser.add_argument('--datadir', default='/tmp/encoded', help="path to datadir")
    args = parser.parse_args()

    logging.basicConfig()
    # Loading app will have configured from config file. Reconfigure here:
    logging.getLogger('encoded').setLevel(logging.DEBUG)

    from encoded.tests import elasticsearch_fixture, postgresql_fixture
    from encoded.commands import create_mapping
    datadir = os.path.abspath(args.datadir)
    pgdata = os.path.join(datadir, 'pgdata')
    esdata = os.path.join(datadir, 'esdata')
    if args.clear:
        for dirname in [pgdata, esdata]:
            if os.path.exists(dirname):
                shutil.rmtree(dirname)
    if args.init:
        postgresql_fixture.initdb(pgdata, echo=True)

    postgres = postgresql_fixture.server_process(pgdata, echo=True)
    elasticsearch = elasticsearch_fixture.server_process(esdata, echo=True)
    processes = [postgres, elasticsearch]

    @atexit.register
    def cleanup_process():
        # Terminate children, drain any remaining output, then reap them.
        for process in processes:
            if process.poll() is None:
                process.terminate()
        for process in processes:
            try:
                for line in process.stdout:
                    sys.stdout.write(line)
            except IOError:
                pass
            process.wait()

    # Create the app once if any step below needs it (was: only under
    # --init, which made `--load` alone crash with NameError).
    if args.init or args.load:
        app = get_app(args.config_uri, args.app_name)

    if args.init:
        create_mapping.run(app)

    if args.load:
        from webtest import TestApp
        environ = {
            'HTTP_ACCEPT': 'application/json',
            'REMOTE_USER': '******',
        }
        testapp = TestApp(app, environ)

        from encoded.loadxl import load_all
        from pkg_resources import resource_filename
        inserts = resource_filename('encoded', 'tests/data/inserts/')
        docsdir = [resource_filename('encoded', 'tests/data/documents/')]
        load_all(testapp, inserts, docsdir)

    print('Started. ^C to exit.')

    stdouts = [p.stdout for p in processes]

    # Ugly should probably use threads instead
    while True:
        readable, writable, err = select.select(stdouts, [], stdouts, 5)
        for stdout in readable:
            for line in iter(stdout.readline, ''):
                sys.stdout.write(line)
        if err:
            for stdout in err:
                for line in iter(stdout.readline, ''):
                    sys.stdout.write(line)
            break
def main():
    """Import data into an encoded instance, either over HTTP(S) or
    directly against an app built from a config file."""
    # https://github.com/gawel/WSGIProxy2/pull/3 (and change to WebTest)
    from wsgiproxy.proxies import ALLOWED_METHODS
    if 'PATCH' not in ALLOWED_METHODS:
        ALLOWED_METHODS.append('PATCH')

    import argparse
    parser = argparse.ArgumentParser(
        description="Import data",
        epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument('--test-only', action='store_true')
    parser.add_argument('--item-type', help="Item type")
    # The three HTTP-method flags are mutually-overwriting store_const
    # options sharing the single `method` destination.
    for flag, verb, helptext in (
            ('--post', "POST", "Create new data"),
            ('--put', "PUT", "Replace existing data"),
            ('--patch', "PATCH", "Patch existing data")):
        parser.add_argument(flag,
                            dest='method',
                            action='store_const',
                            const=verb,
                            help=helptext)
    parser.add_argument(
        '--username',
        '-u',
        default='',
        help="HTTP username (access_key_id) or import user uuid/email")
    parser.add_argument('--password',
                        '-p',
                        default='',
                        help="HTTP password (secret_access_key)")
    parser.add_argument('--attach',
                        '-a',
                        action='append',
                        default=[],
                        help="Directory to search for attachments")
    parser.add_argument('--app-name', help="Pyramid app name in configfile")
    parser.add_argument(
        'inpath', help="input zip file/directory of excel/csv/tsv sheets.")
    parser.add_argument(
        'url', help="either the url to the application or path to configfile")
    args = parser.parse_args()

    logging.basicConfig()

    parsed = urlparse(args.url)
    if parsed.scheme in ('http', 'https'):
        # Remote target: credentials may come from flags or be embedded
        # in the URL itself, but never from both at once.
        base = parsed.scheme + '://' + parsed.netloc
        username = args.username
        password = args.password
        if parsed.username:
            # Strip the user:pass@ prefix from the netloc for the base URL.
            base = parsed.scheme + '://' + parsed.netloc.split('@', 1)[1]
            assert not args.username
            username = parsed.username
            if parsed.password:
                assert not args.password
                password = parsed.password
        testapp = remote_app(base, username, password)
    else:
        # Local target: treat args.url as a path to a config file.
        testapp = internal_app(args.url, args.app_name, args.username)

    # Loading app will have configured from config file. Reconfigure here:
    logging.getLogger('encoded').setLevel(logging.INFO)
    logging.getLogger('wsgi').setLevel(logging.WARNING)

    if args.method:
        run(testapp, args.inpath, args.attach, args.method, args.item_type,
            args.test_only)
    else:
        loadxl.load_all(testapp, args.inpath, args.attach, args.test_only)