def run_environment(db, config):
    """Start the ``db`` docker container and a MindsDB mysql API against it.

    Returns a ``(MindsdbNative, DataStore)`` pair bound to the temp config.
    Raises ``Exception`` if the database or the API does not come up before
    the wait helpers time out.
    """
    DEFAULT_DB = f'default_{db}'
    temp_config_path = prepare_config(config, DEFAULT_DB)

    # Launch the database container only if one is not already running.
    if is_container_run(f'{db}-test') is False:
        subprocess.Popen(
            ['./cli.sh', db],
            cwd=TESTS_ROOT.joinpath('docker/').resolve(),
            stdout=OUTPUT,
            stderr=OUTPUT
        )
        atexit.register(stop_container, name=db)

    db_ready = wait_db(config, DEFAULT_DB)

    if db_ready:
        sp = subprocess.Popen(
            ['python3', '-m', 'mindsdb', '--api', 'mysql', '--config', temp_config_path],
            stdout=OUTPUT,
            stderr=OUTPUT
        )
        atexit.register(stop_mindsdb, sp=sp)

    # Short-circuits: the API is never waited on if the db never started.
    api_ready = db_ready and wait_api_ready(config)

    if db_ready is False or api_ready is False:
        # Carry the diagnostic in the exception itself (previously a bare
        # Exception() was raised after printing, losing the message for
        # anything that catches/logs it).
        msg = f'Failed by timeout. {db} started={db_ready}, MindsDB started={api_ready}'
        print(msg)
        raise Exception(msg)

    mdb = MindsdbNative(config)
    datastore = DataStore(config)
    return mdb, datastore
def startProxy(config):
    """
    Create a server and wait for incoming connections until Ctrl-C
    """
    global HARDCODED_USER
    global HARDCODED_PASSWORD
    global CERT_PATH
    global default_store
    global mdb
    global datahub

    init_logger(config)

    # Credentials/certificate the proxy uses for mysql clients, from config.
    HARDCODED_USER = config['api']['mysql']['user']
    HARDCODED_PASSWORD = config['api']['mysql']['password']
    CERT_PATH = config['api']['mysql']['certificate_path']

    # Module-level singletons the request handlers rely on.
    default_store = DataStore(config)
    mdb = MindsdbNative(config)
    datahub = init_datahub(config)

    host = config['api']['mysql']['host']
    port = int(config['api']['mysql']['port'])

    log.info(f'Starting MindsDB Mysql proxy server on tcp://{host}:{port}')

    # Create the server
    if config['debug'] is True:
        # In debug mode allow immediate rebinding of the port across restarts.
        SocketServer.TCPServer.allow_reuse_address = True
    server = SocketServer.ThreadingTCPServer((host, port), MysqlProxy)
    # Ensure the listening socket is closed on interpreter shutdown.
    atexit.register(MysqlProxy.server_close, srv=server)

    # Activate the server; this will keep running until you
    # interrupt the program with Ctrl-C
    log.info('Waiting for incoming connections...')
    server.serve_forever()
def __init__(self, config):
    """Set up storage, the model cache, and the db/native interfaces.

    Creates the ``misc`` storage directory under ``config['storage_dir']``
    if it does not exist yet.
    """
    self.config = config
    # NOTE: the original assigned self.dbw twice, constructing a second
    # DatabaseWrapper that immediately replaced the first; one is enough.
    self.dbw = DatabaseWrapper(self.config)
    self.storage_dir = os.path.join(config['storage_dir'], 'misc')
    os.makedirs(self.storage_dir, exist_ok=True)
    # In-memory cache of loaded models, keyed by model name.
    self.model_cache = {}
    self.mindsdb_native = MindsdbNative(self.config)
def setUpClass(cls):
    """One-time fixture: download the home-rentals CSV, delete any stale
    test predictor, and (re)create + populate the ClickHouse test table."""
    cls.mdb = MindsdbNative(config)

    # Download the example dataset only when it is not cached locally.
    if os.path.isfile(test_csv) is False:
        r = requests.get("https://s3.eu-west-2.amazonaws.com/mindsdb-example-data/home_rentals.csv")
        with open(test_csv, 'wb') as f:
            f.write(r.content)

    # Remove a leftover predictor from a previous run so training starts clean.
    models = cls.mdb.get_models()
    models = [x['name'] for x in models]
    if test_predictor_name in models:
        cls.mdb.delete_model(test_predictor_name)

    query_ch('create database if not exists test')
    test_tables = query_ch('show tables from test')
    test_tables = [x['name'] for x in test_tables]

    # Create and load the data table only on the first run.
    if test_data_table not in test_tables:
        query_ch(f'''
            CREATE TABLE test.{test_data_table} (
                number_of_rooms Int8,
                number_of_bathrooms Int8,
                sqft Int32,
                location String,
                days_on_market Int16,
                initial_price Int32,
                neighborhood String,
                rental_price Int32
            ) ENGINE = TinyLog()
        ''')
        with open(test_csv) as f:
            csvf = csv.reader(f)
            i = 0
            for row in csvf:
                # Row 0 is the CSV header; skip it.
                if i > 0:
                    number_of_rooms = int(row[0])
                    number_of_bathrooms = int(row[1])
                    # sqft may use ',' as the decimal separator in the source data.
                    sqft = int(float(row[2].replace(',','.')))
                    location = str(row[3])
                    days_on_market = int(row[4])
                    initial_price = int(row[5])
                    neighborhood = str(row[6])
                    rental_price = int(float(row[7]))
                    # NOTE(review): values are interpolated directly into SQL;
                    # acceptable for this controlled fixture, not for untrusted input.
                    query_ch(f'''INSERT INTO test.{test_data_table} VALUES (
                        {number_of_rooms},
                        {number_of_bathrooms},
                        {sqft},
                        '{location}',
                        {days_on_market},
                        {initial_price},
                        '{neighborhood}',
                        {rental_price}
                    )''')
                i += 1
def __init__(self, config):
    """Mongo wire-protocol server: binds host/port from
    ``config['api']['mongodb']``, maps protocol opcodes to responders, and
    pre-registers 'ok' replies for the administrative commands a mongo
    client/shell sends on connect."""
    mongodb_config = config['api'].get('mongodb')
    assert mongodb_config is not None, 'is no mongodb config!'
    host = mongodb_config['host']
    port = mongodb_config['port']
    print(f'start mongo server on {host}:{port}')

    super().__init__((host, int(port)), MongoRequestHandler)

    # Shared state handed to every request handler.
    self.mindsdb_env = {
        'config': config,
        'data_store': DataStore(config),
        'mindsdb_native': MindsdbNative(config)
    }

    respondersCollection = RespondersCollection()

    opQueryResponder = OpQueryResponder(respondersCollection)
    opMsgResponder = OpMsgResponder(respondersCollection)
    opInsertResponder = OpInsertResponder(respondersCollection)

    # Dispatch table: wire-protocol opcode -> responder instance.
    self.operationsHandlersMap = {
        OP_QUERY: opQueryResponder,
        OP_MSG: opMsgResponder,
        OP_INSERT: opInsertResponder
    }

    # Canned replies for housekeeping commands so clients proceed happily.
    respondersCollection.add(
        when={'drop': 'system.sessions'},
        result={'ok': 1}
    )
    respondersCollection.add(
        when={'update': 'system.version'},
        result={'ok': 1}
    )
    respondersCollection.add(
        when={'setFeatureCompatibilityVersion': helpers.is_true},
        result={'ok': 1}
    )
    # Matches captured OpMSG payloads of the form:
    # {'features': 1, '$clusterTime': {...}, '$configServerState': {...}, '$db': 'admin'}
    respondersCollection.add(
        when={'features': helpers.is_true},
        result={'ok': 1}
    )
    # Matches captured OpMSG payloads of the form:
    # {'serverStatus': 1, '$clusterTime': {...}, '$configServerState': {...}, '$db': 'admin'}
    respondersCollection.add(
        when={'serverStatus': helpers.is_true},
        result={'ok': 1}
    )
    # Remaining commands (e.g. the captured {'ismaster': 1, '$db': 'admin',
    # '$client': {...}} handshake) are handled by project-defined responders.
    respondersCollection.responders += op_msg_responders
def run_environment(config, apis=None, override_integration_config=None,
                    override_api_config=None, mindsdb_database='mindsdb',
                    clear_storage=True):
    """Start a MindsDB subprocess exposing ``apis`` and wait for its ports.

    ``apis`` defaults to ``['mysql']``. Returns a ``(MindsdbNative,
    DataStore)`` pair bound to the (re)loaded temp config. Raises
    ``Exception`` if any API port does not open within 60 seconds.
    """
    # Previously the signature used mutable defaults (apis=['mysql'],
    # override_*={}), which are shared across calls; use None sentinels.
    if apis is None:
        apis = ['mysql']
    if override_integration_config is None:
        override_integration_config = {}
    if override_api_config is None:
        override_api_config = {}

    temp_config_path = prepare_config(config, mindsdb_database,
                                      override_integration_config,
                                      override_api_config, clear_storage)
    config = Config(temp_config_path)

    api_str = ','.join(apis)
    sp = subprocess.Popen(
        ['python3', '-m', 'mindsdb', '--api', api_str, '--config', temp_config_path],
        close_fds=True,
        stdout=OUTPUT,
        stderr=OUTPUT
    )
    atexit.register(stop_mindsdb, sp=sp)

    async def wait_port_async(port, timeout):
        # Poll the port once per second until it opens or the timeout expires.
        start_time = time.time()
        started = is_port_in_use(port)
        while (time.time() - start_time) < timeout and started is False:
            await asyncio.sleep(1)
            started = is_port_in_use(port)
        return started

    async def wait_apis_start(ports):
        # All ports are polled concurrently; overall success requires all.
        futures = [wait_port_async(port, 60) for port in ports]
        success = True
        for future in asyncio.as_completed(futures):
            success = success and await future
        return success

    ports_to_wait = [config['api'][api]['port'] for api in apis]

    ioloop = asyncio.get_event_loop()
    if ioloop.is_closed():
        ioloop = asyncio.new_event_loop()
    success = ioloop.run_until_complete(wait_apis_start(ports_to_wait))
    ioloop.close()
    if not success:
        raise Exception('Cant start mindsdb apis')

    CONFIG.MINDSDB_STORAGE_PATH = config.paths['predictors']
    mdb = MindsdbNative(config)
    datastore = DataStore(config)
    return mdb, datastore
def run_environment(db, config, run_apis='mysql'):
    """Bring up the ``db`` backend (docker unless mssql/mongodb, which are
    managed externally) plus a MindsDB instance serving ``run_apis``.

    ``run_apis`` may be a comma-separated string or a list of API names.
    Returns a ``(MindsdbNative, DataStore)`` pair. Raises ``Exception``
    if the database or any API fails to come up in time.
    """
    DEFAULT_DB = f'default_{db}'
    temp_config_path = prepare_config(config, DEFAULT_DB)

    if db in ['mssql', 'mongodb']:
        # These backends are assumed to be started outside of docker.
        db_ready = True
    else:
        if is_container_run(f'{db}-test') is False:
            run_container(db)
        db_ready = wait_db(config, DEFAULT_DB)

    if isinstance(run_apis, list) is False:
        run_apis = run_apis.split(',')
    api_str = ','.join(run_apis)

    # Initialize api_ready up front: previously it was only set inside the
    # db_ready branch, so a db timeout produced a NameError instead of the
    # intended failure message.
    api_ready = False
    if db_ready:
        sp = subprocess.Popen(
            ['python3', '-m', 'mindsdb', '--api', api_str, '--config', temp_config_path],
            stdout=OUTPUT,
            stderr=OUTPUT
        )
        atexit.register(stop_mindsdb, sp=sp)

        api_ready = True
        for api in run_apis:
            # (The old `'mongodb' if api == 'mongodb' else api` was an
            # identity no-op; the name is passed through unchanged.)
            api_ready = api_ready and wait_api_ready(config, api)
            if api_ready is False:
                break

    if db_ready is False or api_ready is False:
        # Carry the diagnostic in the exception (was a bare Exception()).
        msg = f'Failed by timeout. {db} started={db_ready}, MindsDB started={api_ready}'
        print(msg)
        raise Exception(msg)

    CONFIG.MINDSDB_STORAGE_PATH = config.paths['predictors']
    mdb = MindsdbNative(config)
    datastore = DataStore(config)
    return mdb, datastore
def initialize_interfaces(config, app):
    """Attach the shared MindsDB interfaces — the config object, the default
    data store, and the native wrapper — to the application instance."""
    app.config_obj = config
    app.default_store = DataStore(config)
    app.mindsdb_native = MindsdbNative(config)
# Validate that every requested API has a configuration section before
# starting anything; bail out early with a hint otherwise.
for api_name in apis.keys():
    if api_name not in config['api']:
        print(
            f"Trying run '{api_name}' API, but is no config for this api.")
        print(f"Please, fill config['api']['{api_name}']")
        sys.exit(0)

# Map of API name -> starter function used later to launch each API.
start_functions = {
    'http': start_http,
    'mysql': start_mysql,
    'mongodb': start_mongo
}

archive_obsolete_predictors(config, '2.11.0')

mdb = MindsdbNative(config)
cst = CustomModels(config)
# @TODO Maybe just use `get_model_data` directly here ? Seems like a useless abstraction
# Collect metadata for every native predictor plus the custom models, then
# register them all with the database wrapper.
model_data_arr = [{
    'name': x['name'],
    'predict': x['predict'],
    'data_analysis': mdb.get_model_data(x['name'])['data_analysis_v2']
} for x in mdb.get_models()]

model_data_arr.extend(cst.get_models())

dbw = DatabaseWrapper(config)
dbw.register_predictors(model_data_arr)
def __init__(self, config, storage_dir=None):
    """Datastore service.

    Uses ``storage_dir`` when it is a string; otherwise falls back to the
    datasources path from the config object.
    """
    self.config = config
    if isinstance(storage_dir, str):
        self.dir = storage_dir
    else:
        self.dir = config.paths['datasources']
    self.mindsdb_native = MindsdbNative(config)
def __init__(self, config):
    """Keep the config and build the model interfaces this wrapper uses:
    the custom-models registry and the native MindsDB interface."""
    self.config = config
    self.custom_models = CustomModels(config)
    self.mindsdb_native = MindsdbNative(config)
def __init__(self, config):
    """Hold on to the config and a native MindsDB interface built from it."""
    self.config = config
    self.mindsdb_native = MindsdbNative(config)
def setUpClass(cls):
    """One-time fixture: for each split ('train'/'test'), download the churn
    CSV, delete any stale test predictor, and load the rows into MySQL."""
    for mode in ['train', 'test']:
        # Remove any cached copy so each split is downloaded fresh.
        # NOTE(review): shelling out to `rm` is platform-specific, and both
        # splits reuse the same local path `test_csv` — confirm intended.
        os.system(f'rm {test_csv}')
        cls.mdb = MindsdbNative(config)
        if os.path.isfile(test_csv) is False:
            r = requests.get(
                f"https://raw.githubusercontent.com/mindsdb/mindsdb-examples/master/benchmarks/churn/dataset/{mode}.csv"
            )
            with open(test_csv, 'wb') as f:
                f.write(r.content)

        # Remove a leftover predictor from a previous run.
        models = cls.mdb.get_models()
        models = [x['name'] for x in models]
        if test_predictor_name in models:
            cls.mdb.delete_model(test_predictor_name)

        query('create database if not exists test')
        test_tables = query('show tables from test')
        test_tables = [x[0] for x in test_tables]

        # NOTE(review): existence is checked against database `test`, but the
        # table is dropped/created in database `data` — verify this mismatch.
        if test_data_table not in test_tables:
            query(f'DROP TABLE IF EXISTS data.{test_data_table}_{mode}')
            query(f'''
                CREATE TABLE data.{test_data_table}_{mode} (
                    CreditScore int,
                    Geography varchar(300),
                    Gender varchar(300),
                    Age int,
                    Tenure int,
                    Balance float,
                    NumOfProducts int,
                    HasCrCard int,
                    IsActiveMember int,
                    EstimatedSalary float,
                    Exited int
                )
            ''')
            with open(test_csv) as f:
                csvf = csv.reader(f)
                i = 0
                for row in csvf:
                    # Row 0 is the CSV header; skip it.
                    if i > 0:
                        CreditScore = int(row[0])
                        Geography = str(row[1])
                        Gender = str(row[2])
                        Age = int(row[3])
                        Tenure = int(row[4])
                        Balance = float(row[5])
                        NumOfProducts = int(row[6])
                        HasCrCard = int(row[7])
                        IsActiveMember = int(row[8])
                        EstimatedSalary = float(row[9])
                        Exited = int(row[10])
                        # NOTE(review): values interpolated directly into SQL;
                        # acceptable for this fixture, not for untrusted input.
                        query(
                            f'''INSERT INTO data.{test_data_table}_{mode} VALUES (
                                {CreditScore},
                                '{Geography}',
                                '{Gender}',
                                {Age},
                                {Tenure},
                                {Balance},
                                {NumOfProducts},
                                {HasCrCard},
                                {IsActiveMember},
                                {EstimatedSalary},
                                {Exited}
                            )''')
                    i += 1
        os.system(f'rm {test_csv}')
def __init__(self, config):
    """Remember the config, point ``self.dir`` at the configured datasources
    path, and create a native MindsDB interface."""
    self.dir = config.paths['datasources']
    self.config = config
    self.mindsdb_native = MindsdbNative(config)
def __init__(self, config, storage_dir=None):
    """Datastore service.

    Uses ``storage_dir`` when it is a string; otherwise falls back to the
    directory configured under ``interface.datastore.storage_dir``.
    """
    self.config = config
    if isinstance(storage_dir, str):
        self.dir = storage_dir
    else:
        self.dir = config['interface']['datastore']['storage_dir']
    self.mindsdb_native = MindsdbNative(config)