Example #1
0
    def setUpClass(cls):
        """Set up the shared environment for this test class.

        Starts MindsDB with the MySQL API against a published ClickHouse
        integration, removes any leftover predictor, stages the test data
        table if needed, and (re)creates the external file datasource.
        """
        # Boot MindsDB: publish the default ClickHouse integration and
        # expose the MySQL API without SSL.
        mdb, datastore = run_environment(
            config,
            apis=['mysql'],
            override_integration_config={
                'default_clickhouse': {
                    'publish': True
                }
            },
            override_api_config={'mysql': {
                'ssl': False
            }},
            mindsdb_database=MINDSDB_DATABASE)
        cls.mdb = mdb

        # Drop a predictor left over from a previous run so each run
        # starts from a clean state.
        models = cls.mdb.get_models()
        models = [x['name'] for x in models]
        if TEST_PREDICTOR_NAME in models:
            cls.mdb.delete_model(TEST_PREDICTOR_NAME)

        query('create database if not exists test_data')

        # When running against a local DB, create and populate the test
        # table only if it is not already present.
        if not USE_EXTERNAL_DB_SERVER:
            test_csv_path = Path(DATASETS_PATH).joinpath(
                TEST_DATASET).joinpath('data.csv')

            if TEST_DATA_TABLE not in cls.get_tables_in(cls, 'test_data'):
                print('creating test data table...')
                upload_csv(
                    query=query,
                    columns_map=DATASETS_COLUMN_TYPES[TEST_DATASET],
                    db_types_map=DB_TYPES_MAP,
                    table_name=TEST_DATA_TABLE,
                    csv_path=test_csv_path,
                    # ClickHouse-specific DDL template (MergeTree engine,
                    # ordered/partitioned on dataset-specific columns).
                    template=
                    'create table test_data.%s (%s) ENGINE = MergeTree() ORDER BY days_on_market PARTITION BY location'
                )

        # Recreate the external datasource from a fresh 50-row sample so
        # it always reflects the current test data.
        ds = datastore.get_datasource(EXTERNAL_DS_NAME)
        if ds is not None:
            datastore.delete_datasource(EXTERNAL_DS_NAME)

        data = fetch(f'select * from test_data.{TEST_DATA_TABLE} limit 50')
        external_datasource_csv = make_test_csv(EXTERNAL_DS_NAME, data)
        datastore.save_datasource(EXTERNAL_DS_NAME, 'file', 'test.csv',
                                  external_datasource_csv)
Example #2
0
    def setUpClass(cls):
        """Prepare the shared environment for this test class.

        Boots MindsDB with the MySQL and HTTP APIs, stages the test data
        table when running against a local database, and builds the CSV
        used as an external datasource by the tests.
        """
        # Start the MindsDB environment with both APIs enabled.
        mindsdb, _datastore = run_environment(
            config,
            apis=['mysql', 'http'],
            mindsdb_database=MINDSDB_DATABASE,
        )
        cls.mdb = mindsdb

        query('create database if not exists test_data')

        if not USE_EXTERNAL_DB_SERVER:
            # Local run: upload the dataset CSV unless the table exists.
            csv_file = Path(DATASETS_PATH) / TEST_DATASET / 'data.csv'
            if TEST_DATA_TABLE not in cls.get_tables_in(cls, 'test_data'):
                print('creating test data table...')
                upload_csv(
                    query=query,
                    columns_map=DATASETS_COLUMN_TYPES[TEST_DATASET],
                    db_types_map=DB_TYPES_MAP,
                    table_name=TEST_DATA_TABLE,
                    csv_path=csv_file,
                )

        # Sample 50 rows and persist them as the external datasource CSV.
        sample_rows = fetch(
            f'select * from test_data.{TEST_DATA_TABLE} limit 50',
            as_dict=True,
        )
        cls.external_datasource_csv_path = make_test_csv(
            EXTERNAL_DS_NAME, sample_rows)
Example #3
0
    def setUpClass(cls):
        """Prepare the shared environment for this test class.

        Boots MindsDB with a published MariaDB integration, removes any
        leftover predictor, stages the test data table when running
        locally, and (re)creates the external file datasource.
        """
        # Start MindsDB with HTTP + MySQL APIs and MariaDB published.
        mindsdb, datastore = run_environment(
            config,
            apis=['http', 'mysql'],
            override_integration_config={'default_mariadb': {
                'publish': True
            }},
            mindsdb_database=MINDSDB_DATABASE)
        cls.mdb = mindsdb

        # Drop a stale predictor from a previous run, if present.
        existing_models = [model['name'] for model in cls.mdb.get_models()]
        if TEST_PREDICTOR_NAME in existing_models:
            cls.mdb.delete_model(TEST_PREDICTOR_NAME)

        if not USE_EXTERNAL_DB_SERVER:
            # Local run: upload the dataset CSV unless the table exists.
            query('create database if not exists test_data')
            rows = fetch('show tables from test_data', as_dict=False)
            table_names = [row[0] for row in rows]

            if TEST_DATA_TABLE not in table_names:
                csv_file = Path(DATASETS_PATH) / TEST_DATASET / 'data.csv'
                upload_csv(
                    query=query,
                    columns_map=DATASETS_COLUMN_TYPES[TEST_DATASET],
                    db_types_map=DB_TYPES_MAP,
                    table_name=TEST_DATA_TABLE,
                    csv_path=csv_file,
                )

        # Recreate the external datasource from a fresh 50-row sample so
        # it always reflects the current test data.
        if datastore.get_datasource(EXTERNAL_DS_NAME) is not None:
            datastore.delete_datasource(EXTERNAL_DS_NAME)

        sample = fetch(f'select * from test_data.{TEST_DATA_TABLE} limit 50')
        datasource_csv = make_test_csv(EXTERNAL_DS_NAME, sample)
        datastore.save_datasource(EXTERNAL_DS_NAME, 'file', 'test.csv',
                                  datasource_csv)
Example #4
0
    print(msg)


if __name__ == "__main__":
    # Step 1: create the workspace; abort if the server reports errors.
    print(f'creating workspace "{WORKSPACE}"...')

    result = create_workspace(WORKSPACE)
    response = result.json()

    if response["errors"]:
        print("errors encountered:\n", *response["errors"], sep="\n")
        sys.exit(1)
    else:
        # Fix: corrected user-facing typo "succesfully" -> "successfully".
        print("created successfully")

    # Step 2: upload each data table, stopping at the first failure.
    for table in ["members", "clubs", "membership"]:
        print(f"uploading {table}...")
        result = upload_csv(f"data/{table}.csv", WORKSPACE, table)
        if result.status_code != 200:
            error_message(result.json())
            sys.exit(1)
    print("tables uploaded successfully")

    # Step 3: build the graph from the uploaded node/edge tables and
    # show the server's response.
    print("creating graph...")
    graph = create_graph(WORKSPACE, "boston", ["members", "clubs"],
                         "membership")
    pprint(graph.json())