def setUpClass(cls):
    """Boot the HTTP API, load the on-disk test config, and build a CSV datasource.

    Stores the path of the generated CSV on ``cls.external_datasource_csv_path``.
    """
    run_environment(apis=['http'])

    # Merge the JSON test configuration from disk into the shared config object.
    raw_config = Path(CONFIG_PATH).read_text()
    config.update(json.loads(raw_config))

    # Pull a small sample of the test table and persist it as a CSV file.
    sample_sql = f'select * from test_data.{TEST_DATA_TABLE} limit 50'
    sample_rows = fetch(sample_sql, as_dict=True)
    cls.external_datasource_csv_path = make_test_csv(EXTERNAL_DS_NAME, sample_rows)
def setUpClass(cls):
    """Start MindsDB against ClickHouse, reset stale state, and register a CSV datasource.

    Seeds the local ClickHouse ``test_data`` table from the bundled dataset when
    no external DB server is in use, then recreates the file-based external
    datasource from a fresh 50-row sample.
    """
    mdb, datastore = run_environment(
        config,
        apis=['mysql'],
        override_integration_config={'default_clickhouse': {'publish': True}},
        override_api_config={'mysql': {'ssl': False}},
        mindsdb_database=MINDSDB_DATABASE,
    )
    cls.mdb = mdb

    # Drop the predictor left behind by a previous run, if any.
    existing_names = [model['name'] for model in cls.mdb.get_models()]
    if TEST_PREDICTOR_NAME in existing_names:
        cls.mdb.delete_model(TEST_PREDICTOR_NAME)

    query('create database if not exists test_data')

    if not USE_EXTERNAL_DB_SERVER:
        # Populate the local ClickHouse instance with the test dataset.
        csv_file = Path(DATASETS_PATH).joinpath(TEST_DATASET, 'data.csv')
        if TEST_DATA_TABLE not in cls.get_tables_in(cls, 'test_data'):
            print('creating test data table...')
            upload_csv(
                query=query,
                columns_map=DATASETS_COLUMN_TYPES[TEST_DATASET],
                db_types_map=DB_TYPES_MAP,
                table_name=TEST_DATA_TABLE,
                csv_path=csv_file,
                template='create table test_data.%s (%s) ENGINE = MergeTree() ORDER BY days_on_market PARTITION BY location',
            )

    # Recreate the file-based external datasource from a fresh sample.
    if datastore.get_datasource(EXTERNAL_DS_NAME) is not None:
        datastore.delete_datasource(EXTERNAL_DS_NAME)
    sample_rows = fetch(f'select * from test_data.{TEST_DATA_TABLE} limit 50')
    sample_csv = make_test_csv(EXTERNAL_DS_NAME, sample_rows)
    datastore.save_datasource(EXTERNAL_DS_NAME, 'file', 'test.csv', sample_csv)
def setUpClass(cls):
    """Start MindsDB against MSSQL, remove a stale predictor, and register a CSV datasource."""
    mdb, datastore = run_environment(
        config,
        apis=['mysql'],
        override_integration_config={'default_mssql': {'publish': True}},
        mindsdb_database=MINDSDB_DATABASE,
    )
    cls.mdb = mdb

    # Delete any predictor left over from a previous run.
    model_names = [m['name'] for m in cls.mdb.get_models()]
    if TEST_PREDICTOR_NAME in model_names:
        cls.mdb.delete_model(TEST_PREDICTOR_NAME)

    # Sample 50 rows using T-SQL OFFSET/FETCH pagination (MSSQL has no LIMIT)
    # and expose them as a file-based datasource.
    sample_rows = fetch(
        f'select * from test_data.{TEST_DATA_TABLE} order by rental_price offset 0 rows fetch next 50 rows only'
    )
    sample_csv = make_test_csv(EXTERNAL_DS_NAME, sample_rows)
    datastore.save_datasource(EXTERNAL_DS_NAME, 'file', 'test.csv', sample_csv)
def setUpClass(cls):
    """Start MindsDB with MySQL + HTTP APIs, seed the test table, and build a sample CSV.

    Stores the path of the generated CSV on ``cls.external_datasource_csv_path``.
    """
    mdb, _datastore = run_environment(
        config,
        apis=['mysql', 'http'],
        mindsdb_database=MINDSDB_DATABASE,
    )
    cls.mdb = mdb

    query('create database if not exists test_data')

    if not USE_EXTERNAL_DB_SERVER:
        # Load the bundled dataset into the local database if it is missing.
        csv_file = Path(DATASETS_PATH).joinpath(TEST_DATASET, 'data.csv')
        if TEST_DATA_TABLE not in cls.get_tables_in(cls, 'test_data'):
            print('creating test data table...')
            upload_csv(
                query=query,
                columns_map=DATASETS_COLUMN_TYPES[TEST_DATASET],
                db_types_map=DB_TYPES_MAP,
                table_name=TEST_DATA_TABLE,
                csv_path=csv_file,
            )

    # Materialize a 50-row sample of the test table as a CSV file.
    sample_rows = fetch(f'select * from test_data.{TEST_DATA_TABLE} limit 50', as_dict=True)
    cls.external_datasource_csv_path = make_test_csv(EXTERNAL_DS_NAME, sample_rows)
def setUpClass(cls):
    """Start MindsDB against MariaDB, reset stale state, and register a CSV datasource.

    When no external DB server is configured, creates ``test_data`` locally and
    seeds it from the bundled dataset; then recreates the file-based external
    datasource from a fresh 50-row sample.
    """
    mdb, datastore = run_environment(
        config,
        apis=['http', 'mysql'],
        override_integration_config={'default_mariadb': {'publish': True}},
        mindsdb_database=MINDSDB_DATABASE,
    )
    cls.mdb = mdb

    # Drop the predictor left behind by a previous run, if any.
    existing_names = [model['name'] for model in cls.mdb.get_models()]
    if TEST_PREDICTOR_NAME in existing_names:
        cls.mdb.delete_model(TEST_PREDICTOR_NAME)

    if not USE_EXTERNAL_DB_SERVER:
        query('create database if not exists test_data')
        # Seed the table only if it does not exist yet.
        table_rows = fetch('show tables from test_data', as_dict=False)
        existing_tables = [row[0] for row in table_rows]
        if TEST_DATA_TABLE not in existing_tables:
            csv_file = Path(DATASETS_PATH).joinpath(TEST_DATASET, 'data.csv')
            upload_csv(
                query=query,
                columns_map=DATASETS_COLUMN_TYPES[TEST_DATASET],
                db_types_map=DB_TYPES_MAP,
                table_name=TEST_DATA_TABLE,
                csv_path=csv_file,
            )

    # Recreate the file-based external datasource from a fresh sample.
    if datastore.get_datasource(EXTERNAL_DS_NAME) is not None:
        datastore.delete_datasource(EXTERNAL_DS_NAME)
    sample_rows = fetch(f'select * from test_data.{TEST_DATA_TABLE} limit 50')
    sample_csv = make_test_csv(EXTERNAL_DS_NAME, sample_rows)
    datastore.save_datasource(EXTERNAL_DS_NAME, 'file', 'test.csv', sample_csv)
def setUpClass(cls):
    """Start MindsDB against MSSQL and prepare all test fixtures.

    Ensures the ``mindsdb_test`` database and ``mindsdb_schema`` schema exist,
    seeds the test data table row-by-row from the bundled home_rentals CSV when
    no external DB server is in use, and recreates the file-based external
    datasource from a fresh 50-row sample.
    """
    mdb, datastore = run_environment(
        config,
        apis=['mysql'],
        override_integration_config={'default_mssql': {'publish': True}},
        mindsdb_database=MINDSDB_DATABASE,
    )
    cls.mdb = mdb

    # Remove any predictor left over from a previous run.
    models = [x['name'] for x in cls.mdb.get_models()]
    if TEST_PREDICTOR_NAME in models:
        cls.mdb.delete_model(TEST_PREDICTOR_NAME)

    test_csv_path = Path(DATASETS_PATH).joinpath('home_rentals').joinpath('data.csv')

    # Create the test database and schema if they are missing.
    res = query(
        "SELECT name FROM master.dbo.sysdatabases where name = 'mindsdb_test'",
        fetch=True,
    )
    if len(res) == 0:
        query("create database mindsdb_test")
    res = query(
        "select * from sys.schemas where name = 'mindsdb_schema';",
        fetch=True,
    )
    if len(res) == 0:
        query("create schema [mindsdb_schema];")

    if not USE_EXTERNAL_DB_SERVER:
        # T-SQL equivalent of "show tables": look the table up in sysobjects.
        test_tables = query(
            f"select 1 from sysobjects where name='{TEST_DATA_TABLE}' and xtype='U';",
            fetch=True,
        )
        if len(test_tables) == 0:
            print('creating test data table...')
            query(f'''
                CREATE TABLE mindsdb_schema.{TEST_DATA_TABLE} (
                    number_of_rooms int,
                    number_of_bathrooms int,
                    sqft int,
                    location varchar(100),
                    days_on_market int,
                    initial_price int,
                    neighborhood varchar(100),
                    rental_price int
                )
            ''')
            with open(test_csv_path) as f:
                csvf = csv.reader(f)
                next(csvf, None)  # skip the CSV header row
                # NOTE: values are interpolated straight into the SQL. This is
                # acceptable only because the CSV ships with the test suite
                # (trusted input); never do this with external data.
                for i, row in enumerate(csvf, start=1):
                    number_of_rooms = int(row[0])
                    number_of_bathrooms = int(row[1])
                    # sqft may use ',' as the decimal separator in the CSV
                    sqft = int(float(row[2].replace(',', '.')))
                    location = str(row[3])
                    days_on_market = int(row[4])
                    initial_price = int(row[5])
                    neighborhood = str(row[6])
                    rental_price = int(float(row[7]))
                    query(f'''
                        INSERT INTO mindsdb_schema.{TEST_DATA_TABLE} VALUES (
                            {number_of_rooms},
                            {number_of_bathrooms},
                            {sqft},
                            '{location}',
                            {days_on_market},
                            {initial_price},
                            '{neighborhood}',
                            {rental_price}
                        )''')
                    if i % 100 == 0:
                        # progress indicator for the slow row-by-row load
                        print(i)
            print('done')

    # Recreate the external file datasource from a fresh sample.
    ds = datastore.get_datasource(EXTERNAL_DS_NAME)
    if ds is not None:
        datastore.delete_datasource(EXTERNAL_DS_NAME)
    # BUG FIX: MSSQL has no LIMIT clause -- the original 'limit 50' is a T-SQL
    # syntax error; use TOP instead (consistent with the suite's other T-SQL).
    # NOTE(review): this reads from schema 'test_data' while the local seed
    # table above is created in 'mindsdb_schema' -- confirm which schema the
    # external-server setup actually provides.
    data = query(
        f'select top 50 * from test_data.{TEST_DATA_TABLE}',
        fetch=True,
        as_dict=True,
    )
    external_datasource_csv = make_test_csv(EXTERNAL_DS_NAME, data)
    datastore.save_datasource(EXTERNAL_DS_NAME, 'file', 'test.csv',
                              external_datasource_csv)