Example no. 1
def setup_server(environment='development', server_root=''):
    global test_server, model

    patch_auth()
    test_server = run_server(test_mode=True,
                             environment=environment,
                             server_root=server_root)
Example no. 2
def setUpModule():
    global test_server, model, host, port, ssl_port

    utils.patch_auth()
    host = '127.0.0.1'
    port = utils.get_free_port('http')
    ssl_port = utils.get_free_port('https')
    test_server = utils.run_server(host, port, ssl_port, test_mode=True)
Example no. 3
def setUpModule():
    global test_server, model, host, port, ssl_port

    utils.patch_auth()
    host = '127.0.0.1'
    port = utils.get_free_port('http')
    ssl_port = utils.get_free_port('https')
    test_server = utils.run_server(host, port, ssl_port, test_mode=True)
Example no. 4
def setUpModule():
    global test_server, model, host, port, ssl_port

    patch_auth(sudo=False)
    model = kimchi.mockmodel.MockModel("/tmp/obj-store-test")
    host = "127.0.0.1"
    port = get_free_port("http")
    test_server = run_server(host, port, None, test_mode=True, model=model)
Example no. 5
def setUpModule():
    global test_server, model, host, port, ssl_port

    model = kimchi.mockmodel.MockModel("/tmp/obj-store-test")
    host = "127.0.0.1"
    port = utils.get_free_port("http")
    ssl_port = None
    test_server = utils.run_server(host, port, ssl_port, test_mode=True, model=model)
Example no. 6
 def setUp(self):
     global port, host, model, test_server
     cherrypy.request.headers = {'Accept': 'application/json'}
     model = kimchi.mockmodel.MockModel('/tmp/obj-store-test')
     patch_auth()
     port = get_free_port()
     host = '127.0.0.1'
     test_server = run_server(host, port, None, test_mode=True, model=model)
Example no. 7
def setUpModule():
    global test_server, model, host, port, ssl_port

    patch_auth(sudo=False)
    model = kimchi.mockmodel.MockModel('/tmp/obj-store-test')
    host = '127.0.0.1'
    port = get_free_port('http')
    test_server = run_server(host, port, None, test_mode=True, model=model)
Example no. 8
 def setUp(self):
     global port, host, model, test_server
     cherrypy.request.headers = {'Accept': 'application/json'}
     model = kimchi.mockmodel.MockModel('/tmp/obj-store-test')
     patch_auth()
     port = get_free_port()
     host = '127.0.0.1'
     test_server = run_server(host, port, None, test_mode=True, model=model)
Example no. 9
    def test_vm_livemigrate_persistent_API(self):
        patch_auth()

        inst = model.Model(libvirt_uri='qemu:///system',
                           objstore_loc=self.tmp_store)

        host = '127.0.0.1'
        port = get_free_port('http')
        ssl_port = get_free_port('https')
        cherrypy_port = get_free_port('cherrypy_port')

        with RollbackContext() as rollback:
            test_server = run_server(host, port, ssl_port, test_mode=True,
                                     cherrypy_port=cherrypy_port, model=inst)
            rollback.prependDefer(test_server.stop)

            self.request = partial(request, host, ssl_port)

            self.create_vm_test()
            rollback.prependDefer(utils.rollback_wrapper, self.inst.vm_delete,
                                  u'test_vm_migrate')

            # removing cdrom because it is not shared storage and will make
            # the migration fail
            dev_list = self.inst.vmstorages_get_list('test_vm_migrate')
            self.inst.vmstorage_delete('test_vm_migrate', dev_list[0])

            try:
                self.inst.vm_start('test_vm_migrate')
            except Exception as e:
                self.fail('Failed to start the vm, reason: %s' % e)

            migrate_url = "/plugins/kimchi/vms/%s/migrate" % 'test_vm_migrate'

            req = json.dumps({'remote_host': KIMCHI_LIVE_MIGRATION_TEST,
                             'user': '******'})
            resp = self.request(migrate_url, req, 'POST')
            self.assertEquals(202, resp.status)
            task = json.loads(resp.read())
            wait_task(self._task_lookup, task['id'])
            task = json.loads(
                self.request(
                    '/plugins/kimchi/tasks/%s' % task['id'],
                    '{}'
                ).read()
            )
            self.assertEquals('finished', task['status'])

            try:
                remote_conn = self.get_remote_conn()
                rollback.prependDefer(remote_conn.close)
                remote_vm = remote_conn.lookupByName('test_vm_migrate')
                self.assertTrue(remote_vm.isPersistent())
                remote_vm.destroy()
                remote_vm.undefine()
            except Exception as e:
                self.fail('Migration test failed: %s' % e)
Example no. 10
def setup_server(environment='development'):
    global test_server, model, host, port, ssl_port

    patch_auth()
    host = '127.0.0.1'
    port = get_free_port('http')
    ssl_port = get_free_port('https')
    test_server = run_server(host, port, ssl_port, test_mode=True,
                             environment=environment)
Example no. 11
def setup_server(environment='development'):
    global test_server, model, host, port

    patch_auth()
    model = kimchi.mockmodel.MockModel('/tmp/obj-store-test')
    host = '127.0.0.1'
    port = get_free_port()
    test_server = run_server(host, port, None, test_mode=True, model=model,
                             environment=environment)
Example no. 12
def setUpModule():
    global test_server, model, host, port, ssl_port

    patch_auth(sudo=False)
    model = mockmodel.MockModel('/tmp/obj-store-test')
    host = '127.0.0.1'
    port = get_free_port('http')
    ssl_port = get_free_port('https')
    test_server = run_server(host, port, ssl_port, test_mode=True, model=model)
Example no. 13
def setUpModule():
    global test_server, model, host, port, ssl_port, cherrypy_port

    patch_auth()
    model = Model(None, "/tmp/obj-store-test")
    host = "127.0.0.1"
    port = get_free_port("http")
    ssl_port = get_free_port("https")
    cherrypy_port = get_free_port("cherrypy_port")
    test_server = run_server(host, port, ssl_port, test_mode=True, cherrypy_port=cherrypy_port, model=model)
Example no. 14
def setUpModule():
    global test_server, model, host, port, ssl_port

    utils.patch_auth()
    model = kimchi.mockmodel.MockModel('/tmp/obj-store-test')
    host = '127.0.0.1'
    port = utils.get_free_port('http')
    ssl_port = utils.get_free_port('https')
    test_server = utils.run_server(host, port, ssl_port, test_mode=True,
                                   model=model)
Example no. 15
def setUpModule():
    global test_server, model, host, port, ssl_port, objstore_loc

    patch_auth()
    objstore_loc = config.get_object_store()
    model = kimchi.mockmodel.MockModel(objstore_loc)
    host = '127.0.0.1'
    port = get_free_port('http')
    ssl_port = get_free_port('https')
    test_server = run_server(host, port, ssl_port, test_mode=True,
                             model=model)
Example no. 16
def setUpModule():
    global test_server, model, host, port, ssl_port, cherrypy_port, tmpfile

    patch_auth()
    tmpfile = tempfile.mktemp()
    model = MockModel(tmpfile)
    host = "127.0.0.1"
    port = get_free_port("http")
    ssl_port = get_free_port("https")
    cherrypy_port = get_free_port("cherrypy_port")
    test_server = run_server(host, port, ssl_port, test_mode=True, cherrypy_port=cherrypy_port, model=model)
Example no. 17
def setUpModule():
    global test_server, model, host, port, ssl_port, cherrypy_port

    patch_auth()
    model = Model(None, '/tmp/obj-store-test')
    host = '127.0.0.1'
    port = get_free_port('http')
    ssl_port = get_free_port('https')
    cherrypy_port = get_free_port('cherrypy_port')
    test_server = run_server(host, port, ssl_port, test_mode=True,
                             cherrypy_port=cherrypy_port, model=model)
Example no. 18
def setUpModule():
    global host, port, ssl_port, model, test_server, fake_iso
    cherrypy.request.headers = {"Accept": "application/json"}
    model = mockmodel.MockModel("/tmp/obj-store-test")
    patch_auth()
    port = get_free_port("http")
    ssl_port = get_free_port("https")
    host = "127.0.0.1"
    test_server = run_server(host, port, ssl_port, test_mode=True, model=model)
    fake_iso = "/tmp/fake.iso"
    open(fake_iso, "w").close()
Example no. 19
def setUpModule():
    global test_server, model, host, port, ssl_port, cherrypy_port, tmpfile

    utils.patch_auth()
    tmpfile = tempfile.mktemp()
    host = '127.0.0.1'
    port = utils.get_free_port('http')
    ssl_port = utils.get_free_port('https')
    cherrypy_port = utils.get_free_port('cherrypy_port')
    test_server = utils.run_server(host, port, ssl_port, test_mode=True,
                                   cherrypy_port=cherrypy_port)
Example no. 20
def setUpModule():
    global host, port, ssl_port, model, test_server, fake_iso
    cherrypy.request.headers = {'Accept': 'application/json'}
    model = mockmodel.MockModel('/tmp/obj-store-test')
    patch_auth()
    port = get_free_port('http')
    ssl_port = get_free_port('https')
    host = '127.0.0.1'
    test_server = run_server(host, port, ssl_port, test_mode=True, model=model)
    fake_iso = '/tmp/fake.iso'
    open(fake_iso, 'w').close()
Example no. 21
def setUpModule():
    global test_server, model, host, port, ssl_port

    patch_auth(sudo=False)
    model = mockmodel.MockModel('/tmp/obj-store-test')
    host = '127.0.0.1'
    port = get_free_port('http')
    ssl_port = get_free_port('https')
    test_server = run_server(host, port, ssl_port, test_mode=True, model=model)

    # Create fake ISO to do the tests
    construct_fake_iso(fake_iso, True, '12.04', 'ubuntu')
Example no. 22
def setUpModule():
    global test_server, model, host, port, ssl_port

    patch_auth(sudo=False)
    model = kimchi.mockmodel.MockModel('/tmp/obj-store-test')
    host = '127.0.0.1'
    port = get_free_port('http')
    ssl_port = get_free_port('https')
    test_server = run_server(host, port, ssl_port, test_mode=True, model=model)

    # Create fake ISO to do the tests
    construct_fake_iso(fake_iso, True, '12.04', 'ubuntu')
Example no. 23
def setUpModule():
    global test_server, model, host, port, ssl_port

    patch_auth(sudo=False)
    model = kimchi.mockmodel.MockModel("/tmp/obj-store-test")
    host = "127.0.0.1"
    port = get_free_port("http")
    ssl_port = get_free_port("https")
    test_server = run_server(host, port, ssl_port, test_mode=True, model=model)

    # Create fake ISO to do the tests
    construct_fake_iso(fake_iso, True, "12.04", "ubuntu")
Example no. 24
def setUpModule():
    global test_server, model, host, port, ssl_port

    model = kimchi.mockmodel.MockModel('/tmp/obj-store-test')
    host = '127.0.0.1'
    port = utils.get_free_port('http')
    ssl_port = utils.get_free_port('https')
    test_server = utils.run_server(host,
                                   port,
                                   ssl_port,
                                   test_mode=True,
                                   model=model)
Example no. 25
 def setUp(self):
     global host, port, ssl_port, model, test_server, fake_iso
     cherrypy.request.headers = {'Accept': 'application/json'}
     model = kimchi.mockmodel.MockModel('/tmp/obj-store-test')
     patch_auth()
     port = get_free_port('http')
     ssl_port = get_free_port('https')
     host = '127.0.0.1'
     test_server = run_server(host, port, ssl_port, test_mode=True,
                              model=model)
     fake_iso = '/tmp/fake.iso'
     open(fake_iso, 'w').close()
Example no. 26
def setUpModule():
    global test_server, model, host, port, ssl_port, cherrypy_port, tmpfile

    utils.patch_auth()
    tmpfile = tempfile.mktemp()
    model = mockmodel.MockModel(tmpfile)
    host = '127.0.0.1'
    port = utils.get_free_port('http')
    ssl_port = utils.get_free_port('https')
    cherrypy_port = utils.get_free_port('cherrypy_port')
    test_server = utils.run_server(host, port, ssl_port, test_mode=True,
                                   cherrypy_port=cherrypy_port, model=model)
Example no. 27
def setup_server(environment='development'):
    global test_server, model, host, port

    patch_auth()
    model = kimchi.mockmodel.MockModel('/tmp/obj-store-test')
    host = '127.0.0.1'
    port = get_free_port()
    test_server = run_server(host,
                             port,
                             None,
                             test_mode=True,
                             model=model,
                             environment=environment)
Example no. 28
 def test_server_start(self):
     """
     Test that we can start a server and receive HTTP:200.
     """
     host = '127.0.0.1'
     port = utils.get_free_port()
     model = kimchi.mockmodel.MockModel('/tmp/obj-store-test')
     s = utils.run_server(host, port, None, test_mode=True, model=model)
     try:
         resp = utils.request(host, port, '/')
         self.assertEquals(200, resp.status)
     except:
         raise
     finally:
         os.unlink('/tmp/obj-store-test')
         s.stop()
Example no. 29
def setUpModule():
    global test_server, model, host, port, ssl_port, cherrypy_port

    patch_auth()
    model = kimchi.mockmodel.MockModel('/tmp/obj-store-test')
    host = '127.0.0.1'
    port = get_free_port('http')
    ssl_port = get_free_port('https')
    cherrypy_port = get_free_port('cherrypy_port')
    test_server = run_server(host, port, ssl_port, test_mode=True,
                             cherrypy_port=cherrypy_port, model=model)

    # Create fake ISO to do the tests
    iso_gen.construct_fake_iso(fake_iso, True, '12.04', 'ubuntu')
    iso_gen.construct_fake_iso("/var/lib/libvirt/images/fedora.iso", True,
                               "17", "fedora")
Example no. 30
 def test_server_start(self):
     """
     Test that we can start a server and receive HTTP:200.
     """
     host = '127.0.0.1'
     port = utils.get_free_port()
     model = kimchi.mockmodel.MockModel('/tmp/obj-store-test')
     s = utils.run_server(host, port, None, test_mode=True, model=model)
     try:
         resp = utils.request(host, port, '/')
         self.assertEquals(200, resp.status)
     except:
         raise
     finally:
         os.unlink('/tmp/obj-store-test')
         s.stop()
Example no. 31
 def test_server_start(self):
     """
      Test that we can start a server and receive HTTP:200 with the
      expected hostname in the response.
     """
     host = '127.0.0.1'
     port = utils.get_free_port()
     s = utils.run_server(host, port, test_mode=True)
     try:
         resp = utils.request(host, port, '/')
         data = json.loads(resp.read())
         self.assertEquals(200, resp.status)
         self.assertEquals('localhost', data['hostname'])
     except:
         raise
     finally:
         s.stop()
Example no. 32
def setUpModule():
    global test_server, model

    utils.patch_auth()
    test_server = utils.run_server(test_mode=True)
Example no. 33
            return

        # get POST query
        data_length = int(self.headers['Content-Length'])
        post_data_json = self.rfile.read(data_length).decode('utf-8')

        check_results = self.query.check(post_data_json,
                                         self._get_valid_attrs(),
                                         self._get_valid_colors())
        if check_results['error']:
            self.response(check_results['error'])
            return

        post_data_dict = check_results['dict']

        # if query is valid - check for namesake
        namesake_error = self.query.check_namesake(
            self._namesake(post_data_dict['name']))
        if namesake_error:
            self.response(namesake_error)
            return

        # all right - store cat info to db and send success info to client
        if dict_to_db(CATS_TABLE, post_data_dict):
            self.response("Success: cat {} added to database".format(
                post_data_dict['name']))


if __name__ == '__main__':
    run_server(task=5, handler=Task5RequestHandler)
Example no. 34
def setUpModule():
    global test_server, model, tmpfile

    utils.patch_auth()
    tmpfile = tempfile.mktemp()
    test_server = utils.run_server(test_mode=True)
Example no. 35
from datetime import datetime

from logger import Logger
from utils import run_server

if __name__ == '__main__':
    logger = Logger('main')
    logger.info(f'server started running at {datetime.now()}')
    run_server()
    logger.info(f'server terminated at {datetime.now()}')
Example no. 36
#!/usr/bin/env python3
from sanic import response
from sanic.exceptions import abort
from utils import create_app, run_server, forward_request

app, libtoken, license, index_html = create_app()


@app.route("/")
async def index(request):
    return response.html(index_html)


@app.route('/favicon.ico')
async def favicon(request):
    abort(404)


@app.route('/<path:[^/].*?>')
async def forward(request, path):
    return await forward_request(request, path, libtoken, license)


if __name__ == '__main__':
    run_server(app, '0.0.0.0', 8000, False)
Example no. 37
        # limit in query string
        if query_params.get('limit'):
            limit = query_params['limit'][0]
            query += f' LIMIT {limit}'

        return query

    def do_GET(self):
        """ Handles GET query """
        valid_attr_values = db_table_column_names(CATS_TABLE)
        cats_number = db_table_size(CATS_TABLE)

        error = self.query.check(self.path, valid_attr_values, cats_number)
        if error:
            self.response(error)
        else:
            # set sql query
            query_params = parse_query(self.path)[1]
            query = self._set_sql_query(query_params)

            # get data from db
            data = db_query_realdict(query)

            # send data to client
            self.response(data)


if __name__ == '__main__':
    run_server(task=4, handler=Task4RequestHandler)
Example no. 38
    def run(self, definition, run_type=None):
        if run_type == "performance":
            for op_type, op_value in definition.items():
                # run docker mode
                run_count = op_value["run_count"]
                run_params = op_value["params"]
                container = None

                if op_type == "insert":
                    if not run_params:
                        logger.debug("No run params")
                        continue
                    for index, param in enumerate(run_params):
                        logger.info("Definition param: %s" % str(param))
                        table_name = param["table_name"]
                        volume_name = param["db_path_prefix"]
                        print(table_name)
                        (data_type, table_size, index_file_size, dimension,
                         metric_type) = parser.table_parser(table_name)
                        for k, v in param.items():
                            if k.startswith("server."):
                                # Update server config
                                utils.modify_config(k,
                                                    v,
                                                    type="server",
                                                    db_slave=None)
                        container = utils.run_server(self.image,
                                                     test_type="remote",
                                                     volume_name=volume_name,
                                                     db_slave=None)
                        time.sleep(2)
                        milvus = MilvusClient(table_name)
                        # Check has table or not
                        if milvus.exists_table():
                            milvus.delete()
                            time.sleep(10)
                        milvus.create_table(table_name, dimension,
                                            index_file_size, metric_type)
                        # debug
                        # milvus.create_index("ivf_sq8", 16384)
                        res = self.do_insert(milvus, table_name, data_type,
                                             dimension, table_size,
                                             param["ni_per"])
                        logger.info(res)
                        # wait for file merge
                        time.sleep(table_size * dimension / 5000000)
                        # Clear up
                        utils.remove_container(container)

                elif op_type == "query":
                    for index, param in enumerate(run_params):
                        logger.info("Definition param: %s" % str(param))
                        table_name = param["dataset"]
                        volume_name = param["db_path_prefix"]
                        (data_type, table_size, index_file_size, dimension,
                         metric_type) = parser.table_parser(table_name)
                        for k, v in param.items():
                            if k.startswith("server."):
                                utils.modify_config(k, v, type="server")
                        container = utils.run_server(self.image,
                                                     test_type="remote",
                                                     volume_name=volume_name,
                                                     db_slave=None)
                        time.sleep(2)
                        milvus = MilvusClient(table_name)
                        logger.debug(milvus.show_tables())
                        # Check has table or not
                        if not milvus.exists_table():
                            logger.warning(
                                "Table %s not existed, continue exec next params ..."
                                % table_name)
                            continue
                        # parse index info
                        index_types = param["index.index_types"]
                        nlists = param["index.nlists"]
                        # parse top-k, nq, nprobe
                        top_ks, nqs, nprobes = parser.search_params_parser(
                            param)
                        for index_type in index_types:
                            for nlist in nlists:
                                result = milvus.describe_index()
                                logger.info(result)
                                # milvus.drop_index()
                                # milvus.create_index(index_type, nlist)
                                result = milvus.describe_index()
                                logger.info(result)
                                logger.info(milvus.count())
                                # preload index
                                milvus.preload_table()
                                logger.info("Start warm up query")
                                res = self.do_query(milvus, table_name, [1],
                                                    [1], 1, 1)
                                logger.info("End warm up query")
                                # Run query test
                                for nprobe in nprobes:
                                    logger.info(
                                        "index_type: %s, nlist: %s, metric_type: %s, nprobe: %s"
                                        % (index_type, nlist, metric_type,
                                           nprobe))
                                    res = self.do_query(
                                        milvus, table_name, top_ks, nqs,
                                        nprobe, run_count)
                                    headers = ["Nq/Top-k"]
                                    headers.extend(
                                        [str(top_k) for top_k in top_ks])
                                    utils.print_table(headers, nqs, res)
                        utils.remove_container(container)

        elif run_type == "insert_performance":
            for op_type, op_value in definition.items():
                # run docker mode
                run_count = op_value["run_count"]
                run_params = op_value["params"]
                container = None
                if not run_params:
                    logger.debug("No run params")
                    continue
                for index, param in enumerate(run_params):
                    logger.info("Definition param: %s" % str(param))
                    table_name = param["table_name"]
                    volume_name = param["db_path_prefix"]
                    print(table_name)
                    (data_type, table_size, index_file_size, dimension,
                     metric_type) = parser.table_parser(table_name)
                    for k, v in param.items():
                        if k.startswith("server."):
                            # Update server config
                            utils.modify_config(k,
                                                v,
                                                type="server",
                                                db_slave=None)
                    container = utils.run_server(self.image,
                                                 test_type="remote",
                                                 volume_name=volume_name,
                                                 db_slave=None)
                    time.sleep(2)
                    milvus = MilvusClient(table_name)
                    # Check has table or not
                    if milvus.exists_table():
                        milvus.delete()
                        time.sleep(10)
                    milvus.create_table(table_name, dimension, index_file_size,
                                        metric_type)
                    # debug
                    # milvus.create_index("ivf_sq8", 16384)
                    res = self.do_insert(milvus, table_name, data_type,
                                         dimension, table_size,
                                         param["ni_per"])
                    logger.info(res)
                    # wait for file merge
                    time.sleep(table_size * dimension / 5000000)
                    # Clear up
                    utils.remove_container(container)

        elif run_type == "search_performance":
            for op_type, op_value in definition.items():
                # run docker mode
                run_count = op_value["run_count"]
                run_params = op_value["params"]
                container = None
                for index, param in enumerate(run_params):
                    logger.info("Definition param: %s" % str(param))
                    table_name = param["dataset"]
                    volume_name = param["db_path_prefix"]
                    (data_type, table_size, index_file_size, dimension,
                     metric_type) = parser.table_parser(table_name)
                    for k, v in param.items():
                        if k.startswith("server."):
                            utils.modify_config(k, v, type="server")
                    container = utils.run_server(self.image,
                                                 test_type="remote",
                                                 volume_name=volume_name,
                                                 db_slave=None)
                    time.sleep(2)
                    milvus = MilvusClient(table_name)
                    logger.debug(milvus.show_tables())
                    # Check has table or not
                    if not milvus.exists_table():
                        logger.warning(
                            "Table %s not existed, continue exec next params ..."
                            % table_name)
                        continue
                    # parse index info
                    index_types = param["index.index_types"]
                    nlists = param["index.nlists"]
                    # parse top-k, nq, nprobe
                    top_ks, nqs, nprobes = parser.search_params_parser(param)
                    for index_type in index_types:
                        for nlist in nlists:
                            result = milvus.describe_index()
                            logger.info(result)
                            # milvus.drop_index()
                            # milvus.create_index(index_type, nlist)
                            result = milvus.describe_index()
                            logger.info(result)
                            logger.info(milvus.count())
                            # preload index
                            milvus.preload_table()
                            logger.info("Start warm up query")
                            res = self.do_query(milvus, table_name, [1], [1],
                                                1, 1)
                            logger.info("End warm up query")
                            # Run query test
                            for nprobe in nprobes:
                                logger.info(
                                    "index_type: %s, nlist: %s, metric_type: %s, nprobe: %s"
                                    % (index_type, nlist, metric_type, nprobe))
                                res = self.do_query(milvus, table_name, top_ks,
                                                    nqs, nprobe, run_count)
                                headers = ["Nq/Top-k"]
                                headers.extend(
                                    [str(top_k) for top_k in top_ks])
                                utils.print_table(headers, nqs, res)
                    utils.remove_container(container)

        elif run_type == "accuracy":
            """
            {
                "dataset": "random_50m_1024_512", 
                "index.index_types": ["flat", ivf_flat", "ivf_sq8"],
                "index.nlists": [16384],
                "nprobes": [1, 32, 128], 
                "nqs": [100],
                "top_ks": [1, 64], 
                "server.use_blas_threshold": 1100, 
                "server.cpu_cache_capacity": 256
            }
            """
            for op_type, op_value in definition.items():
                if op_type != "query":
                    logger.warning(
                        "invalid operation: %s in accuracy test, only support query operation"
                        % op_type)
                    break
                run_count = op_value["run_count"]
                run_params = op_value["params"]
                container = None

                for index, param in enumerate(run_params):
                    logger.info("Definition param: %s" % str(param))
                    table_name = param["dataset"]
                    sift_acc = False
                    if "sift_acc" in param:
                        sift_acc = param["sift_acc"]
                    (data_type, table_size, index_file_size, dimension,
                     metric_type) = parser.table_parser(table_name)
                    for k, v in param.items():
                        if k.startswith("server."):
                            utils.modify_config(k, v, type="server")
                    volume_name = param["db_path_prefix"]
                    container = utils.run_server(self.image,
                                                 test_type="remote",
                                                 volume_name=volume_name,
                                                 db_slave=None)
                    time.sleep(2)
                    milvus = MilvusClient(table_name)
                    # Check has table or not
                    if not milvus.exists_table():
                        logger.warning(
                            "Table %s not existed, continue exec next params ..."
                            % table_name)
                        continue

                    # parse index info
                    index_types = param["index.index_types"]
                    nlists = param["index.nlists"]
                    # parse top-k, nq, nprobe
                    top_ks, nqs, nprobes = parser.search_params_parser(param)
                    if sift_acc is True:
                        # preload groundtruth data
                        true_ids_all = self.get_groundtruth_ids(table_size)
                    acc_dict = {}
                    for index_type in index_types:
                        for nlist in nlists:
                            result = milvus.describe_index()
                            logger.info(result)
                            milvus.create_index(index_type, nlist)
                            # preload index
                            milvus.preload_table()
                            # Run query test
                            for nprobe in nprobes:
                                logger.info(
                                    "index_type: %s, nlist: %s, metric_type: %s, nprobe: %s"
                                    % (index_type, nlist, metric_type, nprobe))
                                for top_k in top_ks:
                                    for nq in nqs:
                                        result_ids = []
                                        id_prefix = "%s_index_%s_nlist_%s_metric_type_%s_nprobe_%s_top_k_%s_nq_%s" % \
                                                    (table_name, index_type, nlist, metric_type, nprobe, top_k, nq)
                                        if sift_acc is False:
                                            self.do_query_acc(
                                                milvus, table_name, top_k, nq,
                                                nprobe, id_prefix)
                                            if index_type != "flat":
                                                # Compute accuracy
                                                base_name = "%s_index_flat_nlist_%s_metric_type_%s_nprobe_%s_top_k_%s_nq_%s" % \
                                                    (table_name, nlist, metric_type, nprobe, top_k, nq)
                                                avg_acc = self.compute_accuracy(
                                                    base_name, id_prefix)
                                                logger.info(
                                                    "Query: <%s> accuracy: %s"
                                                    % (id_prefix, avg_acc))
                                        else:
                                            result_ids, result_distances = self.do_query_ids(
                                                milvus, table_name, top_k, nq,
                                                nprobe)
                                            debug_file_ids = "0.5.3_result_ids"
                                            debug_file_distances = "0.5.3_result_distances"
                                            with open(debug_file_ids,
                                                      "w+") as fd:
                                                total = 0
                                                for index, item in enumerate(
                                                        result_ids):
                                                    true_item = true_ids_all[:nq, :top_k].tolist()[index]
                                                    tmp = set(item).intersection(set(true_item))
                                                    total = total + len(tmp)
                                                    fd.write(
                                                        "query: N-%d, intersection: %d, total: %d\n"
                                                        % (index, len(tmp),
                                                           total))
                                                    fd.write("%s\n" %
                                                             str(item))
                                                    fd.write("%s\n" %
                                                             str(true_item))
                                            acc_value = self.get_recall_value(
                                                true_ids_all[:nq, :top_k].tolist(), result_ids)
                                            logger.info(
                                                "Query: <%s> accuracy: %s" %
                                                (id_prefix, acc_value))
                    # # print accuracy table
                    # headers = [table_name]
                    # headers.extend([str(top_k) for top_k in top_ks])
                    # utils.print_table(headers, nqs, res)

                    # remove container, and run next definition
                    logger.info("remove container, and run next definition")
                    utils.remove_container(container)

        elif run_type == "stability":
            for op_type, op_value in definition.items():
                if op_type != "query":
                    logger.warning(
                        "invalid operation: %s in accuracy test, only support query operation"
                        % op_type)
                    break
                run_count = op_value["run_count"]
                run_params = op_value["params"]
                container = None
                for index, param in enumerate(run_params):
                    logger.info("Definition param: %s" % str(param))
                    table_name = param["dataset"]
                    index_type = param["index_type"]
                    volume_name = param["db_path_prefix"]
                    (data_type, table_size, index_file_size, dimension,
                     metric_type) = parser.table_parser(table_name)

                    # set default test time
                    if "during_time" not in param:
                        during_time = 100  # seconds
                    else:
                        during_time = int(param["during_time"]) * 60
                    # set default query process num
                    if "query_process_num" not in param:
                        query_process_num = 10
                    else:
                        query_process_num = int(param["query_process_num"])

                    for k, v in param.items():
                        if k.startswith("server."):
                            utils.modify_config(k, v, type="server")

                    container = utils.run_server(self.image,
                                                 test_type="remote",
                                                 volume_name=volume_name,
                                                 db_slave=None)
                    time.sleep(2)
                    milvus = MilvusClient(table_name)
                    # Check has table or not
                    if not milvus.exists_table():
                        logger.warning(
                            "Table %s not existed, continue exec next params ..."
                            % table_name)
                        continue

                    start_time = time.time()
                    insert_vectors = [[
                        random.random() for _ in range(dimension)
                    ] for _ in range(10000)]
                    i = 0
                    while time.time() < start_time + during_time:
                        i = i + 1
                        processes = []
                        # do query
                        # for i in range(query_process_num):
                        #     milvus_instance = MilvusClient(table_name)
                        #     top_k = random.choice([x for x in range(1, 100)])
                        #     nq = random.choice([x for x in range(1, 100)])
                        #     nprobe = random.choice([x for x in range(1, 1000)])
                        #     # logger.info("index_type: %s, nlist: %s, metric_type: %s, nprobe: %s" % (index_type, nlist, metric_type, nprobe))
                        #     p = Process(target=self.do_query, args=(milvus_instance, table_name, [top_k], [nq], [nprobe], run_count, ))
                        #     processes.append(p)
                        #     p.start()
                        #     time.sleep(0.1)
                        # for p in processes:
                        #     p.join()
                        milvus_instance = MilvusClient(table_name)
                        top_ks = random.sample([x for x in range(1, 100)], 3)
                        nqs = random.sample([x for x in range(1, 1000)], 3)
                        nprobe = random.choice([x for x in range(1, 500)])
                        res = self.do_query(milvus, table_name, top_ks, nqs,
                                            nprobe, run_count)
                        if i % 10 == 0:
                            status, res = milvus_instance.insert(
                                insert_vectors,
                                ids=[x for x in range(len(insert_vectors))])
                            if not status.OK():
                                logger.error(status)
                            # status = milvus_instance.drop_index()
                            # if not status.OK():
                            #     logger.error(status)
                            # index_type = random.choice(["flat", "ivf_flat", "ivf_sq8"])
                            milvus_instance.create_index(index_type, 16384)
                            result = milvus.describe_index()
                            logger.info(result)
                            # milvus_instance.create_index("ivf_sq8", 16384)
                    utils.remove_container(container)

        else:
            logger.warning("Run type: %s not supported" % run_type)
Example no. 39
def setUpModule():
    global test_server, model, tmpfile

    utils.patch_auth()
    tmpfile = tempfile.mktemp()
    test_server = utils.run_server(test_mode=True)
Example no. 40
def setUpModule():
    global test_server, model

    utils.patch_auth()
    test_server = utils.run_server(test_mode=True)
Example no. 41
        """
        image = request.data
        processed_image = self.__calling_fn(image)
        return image_generic_pb2.Image(data=processed_image)


if __name__ == '__main__':
    logging.basicConfig(
        format='[ %(levelname)s ] %(asctime)s (%(module)s) %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S',
        level=logging.INFO)

    calling_fn = utils.get_calling_function()
    if not calling_fn:
        exit(1)

    server = grpc.server(futures.ThreadPoolExecutor())
    image_generic_pb2_grpc.add_ImageGenericServiceServicer_to_server(
        ServiceImpl(calling_fn),
        server)

    # Add reflection
    service_names = (
        image_generic_pb2.DESCRIPTOR.services_by_name['ImageGenericService'].full_name,
        grpc_reflection.SERVICE_NAME
    )
    grpc_reflection.enable_server_reflection(service_names, server)

    utils.run_server(server)
Example no. 42
def setup_server(environment='development'):
    global test_server, model

    patch_auth()
    test_server = run_server(test_mode=True, environment=environment)
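
Taken together, the examples above repeat one pattern: patch authentication, create a mock model, grab free HTTP/HTTPS ports, start the server in test mode, and stop it when the module's tests finish. The sketch below condenses that pattern into a single module-level setup/teardown pair. It is a composite illustration only: it assumes a Kimchi-style utils module exposing patch_auth, get_free_port and run_server, and a mockmodel.MockModel class, as used in the examples above; it is not taken verbatim from any one of the listed projects.

# Composite sketch of the recurring test-server pattern shown above.
# Assumes Kimchi-style helpers (utils.patch_auth, utils.get_free_port,
# utils.run_server) and mockmodel.MockModel; names are illustrative.
import tempfile

import mockmodel
import utils

test_server = None
model = None
host = port = ssl_port = None


def setUpModule():
    global test_server, model, host, port, ssl_port

    utils.patch_auth()                      # stub out authentication for tests
    model = mockmodel.MockModel(tempfile.mktemp())
    host = '127.0.0.1'
    port = utils.get_free_port('http')      # unused ports so suites can run in parallel
    ssl_port = utils.get_free_port('https')
    test_server = utils.run_server(host, port, ssl_port, test_mode=True,
                                   model=model)


def tearDownModule():
    # Stop the server started in setUpModule (mirrors test_server.stop /
    # s.stop() in examples 9, 28 and 30).
    test_server.stop()

unittest discovers setUpModule and tearDownModule automatically, so the test cases in the module can issue requests against host, port and ssl_port without repeating this boilerplate in every class.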