def start_analysis(self, name):
    datasource_record = session.query(Datasource).filter_by(
        company_id=self.company_id, name=name).first()
    # Nothing to do if an analysis is already stored.
    if datasource_record.analysis is not None:
        return None

    # Acquire a write semaphore for this datasource; bail out if another
    # process is already analysing it.
    semaphor_record = session.query(Semaphor).filter_by(
        company_id=self.company_id, entity_id=datasource_record.id,
        entity_type='datasource').first()
    if semaphor_record is None:
        semaphor_record = Semaphor(company_id=self.company_id,
                                   entity_id=datasource_record.id,
                                   entity_type='datasource', action='write')
        session.add(semaphor_record)
        session.commit()
    else:
        return

    try:
        analysis = self.mindsdb_native.analyse_dataset(
            self.get_datasource_obj(name, raw=True))
        datasource_record = session.query(Datasource).filter_by(
            company_id=self.company_id, name=name).first()
        datasource_record.analysis = json.dumps(analysis)
        session.commit()
    except Exception as e:
        log.error(e)
    finally:
        # Always release the semaphore, whether the analysis succeeded or not.
        semaphor_record = session.query(Semaphor).filter_by(
            company_id=self.company_id, entity_id=datasource_record.id,
            entity_type='datasource').first()
        session.delete(semaphor_record)
        session.commit()

def save_datasource(self, name, source_type, source, file_path=None, company_id=None):
    if source_type == 'file' and (file_path is None):
        raise Exception('`file_path` argument required when source_type == "file"')

    datasource_record = session.query(Datasource).filter_by(
        company_id=company_id, name=name).first()
    if datasource_record is not None:
        raise Exception(f'Datasource with name {name} already exists')

    try:
        datasource_record = Datasource(
            company_id=company_id,
            name=name,
            datasources_version=mindsdb_datasources.__version__,
            mindsdb_version=mindsdb_version
        )
        session.add(datasource_record)
        session.commit()

        ds_meta_dir = os.path.join(self.dir, f'{company_id}@@@@@{name}')
        os.mkdir(ds_meta_dir)

        ds, creation_info = self.create_datasource(source_type, source, file_path, company_id, ds_meta_dir)

        # Prefer the datasource's own metadata accessors; fall back to the dataframe.
        if hasattr(ds, 'get_columns') and hasattr(ds, 'get_row_count'):
            try:
                column_names = ds.get_columns()
                row_count = ds.get_row_count()
            except Exception:
                df = ds.df
                column_names = list(df.keys())
                row_count = len(df)
        else:
            df = ds.df
            column_names = list(df.keys())
            row_count = len(df)

        if '' in column_names or len(column_names) != len(set(column_names)):
            shutil.rmtree(ds_meta_dir)
            raise Exception('Each column in datasource must have unique non-empty name')

        datasource_record.creation_info = json.dumps(creation_info)
        datasource_record.data = json.dumps({
            'source_type': source_type,
            'source': source,
            'row_count': row_count,
            'columns': [dict(name=x) for x in column_names]
        })

        self.fs_store.put(f'{company_id}@@@@@{name}',
                          f'datasource_{company_id}_{datasource_record.id}', self.dir)
        session.commit()
    except Exception as e:
        log.error(f'Error creating datasource {name}, exception: {e}')
        try:
            self.delete_datasource(name, company_id=company_id)
        except Exception:
            pass
        raise e

    return self.get_datasource_obj(name, raw=True, company_id=company_id)

def put(self, name):
    # Backward compatibility with the previous endpoint version
    params = request.json.get('params') or request.json
    params_keys = params.keys()

    for param in ['predictor', 'stream_in', 'stream_out']:
        if param not in params_keys:
            return abort(400, 'Please provide "{}"'.format(param))

    if 'integration' not in params_keys and 'connection' not in params_keys:
        return abort(
            400,
            "'integration' in case of local installation and 'connection' in case of cloud are required."
        )

    if 'integration' in params_keys:
        integration = get_db_integration(params['integration'], request.company_id)
        if integration is None:
            return abort(404, 'Integration "{}" doesn\'t exist'.format(params['integration']))

        if integration['type'] not in STREAM_INTEGRATION_TYPES:
            return abort(
                400,
                'Integration "{}" is not of type [{}]'.format(
                    params['integration'], '/'.join(STREAM_INTEGRATION_TYPES)))
    else:
        if 'type' not in params_keys:
            return abort(404, "'type' parameter is required in case of cloud.")

    if db.session.query(db.Stream).filter_by(
            company_id=request.company_id, name=name).first() is not None:
        return abort(404, 'Stream "{}" already exists'.format(name))

    if db.session.query(db.Predictor).filter_by(
            company_id=request.company_id, name=params['predictor']).first() is None:
        return abort(404, 'Predictor "{}" doesn\'t exist'.format(params['predictor']))

    stream = db.Stream(
        company_id=request.company_id,
        name=name,
        integration=params.get('integration'),
        predictor=params['predictor'],
        stream_in=params['stream_in'],
        stream_out=params['stream_out'],
        anomaly_stream=params.get('anomaly_stream'),
        learning_stream=params.get('learning_stream'),
        type=params.get('type'),
        connection_info=params.get('connection'))

    session.add(stream)
    session.commit()

    return {'success': True}, 200

def add(self, name, integration_name, integration_query, query_fields, predictor_name, predictor_fields):
    ai_table_record = AITable(name=name.lower(),
                              integration_name=integration_name,
                              integration_query=integration_query,
                              query_fields=query_fields,
                              predictor_name=predictor_name,
                              predictor_columns=predictor_fields)
    session.add(ai_table_record)
    session.commit()

def save_datasource(self, name, source_type, source=None, file_path=None, company_id=None):
    dataset_record = session.query(Dataset).filter_by(
        company_id=company_id, name=name).first()
    if dataset_record is not None:
        raise Exception(f'Dataset with name {name} already exists')

    if source_type == 'views':
        source_type = 'view_query'
    elif source_type == 'files':
        source_type = 'file'

    try:
        dataset_record = Dataset(
            company_id=company_id,
            name=name,
            datasources_version=mindsdb_datasources.__version__,
            mindsdb_version=mindsdb_version)
        session.add(dataset_record)
        session.commit()

        ds, creation_info = self.create_datasource(source_type, source, file_path, company_id)
        ds_meta = self._get_ds_meta(ds)

        column_names = ds_meta['column_names']
        row_count = ds_meta['row_count']

        dataset_record.ds_class = creation_info['class']
        dataset_record.creation_info = json.dumps(creation_info)
        dataset_record.data = json.dumps({
            'source_type': source_type,
            'source': source,
            'row_count': row_count,
            'columns': [dict(name=x) for x in column_names]
        })
        session.commit()
    except Exception as e:
        log.error(f'Error creating dataset {name}, exception: {e}')
        try:
            self.delete_datasource(name, company_id=company_id)
        except Exception:
            pass
        raise e

    return self.get_datasource_obj(name, raw=True, company_id=company_id)

def save_model_data(self, name, data):
    predictor_record = Predictor.query.filter_by(
        company_id=self.company_id, name=name, is_custom=True).first()
    if predictor_record is None:
        predictor_record = Predictor(company_id=self.company_id, name=name,
                                     is_custom=True, data=data)
        session.add(predictor_record)
    else:
        predictor_record.data = data
    session.commit()

def emit(self, record):
    self.format(record)
    if len(record.message.strip(' \n')) == 0 \
            or (record.threadName == 'ray_print_logs' and 'mindsdb-logger' not in record.message):
        return

    log_type = record.levelname
    source = f'file: {record.pathname} - line: {record.lineno}'
    payload = record.msg

    if telemtry_enabled:
        pass
        # @TODO: Enable once we are sure no sensitive info is being output in the logs
        # if log_type in ['INFO']:
        #     add_breadcrumb(
        #         category='auth',
        #         message=str(payload),
        #         level='info',
        #     )
        # Might be too much traffic if we send this for users with slow networks
        # if log_type in ['DEBUG']:
        #     add_breadcrumb(
        #         category='auth',
        #         message=str(payload),
        #         level='debug',
        #     )

    if log_type in ['ERROR', 'WARNING']:
        trace = str(traceback.format_stack(limit=20))
        trac_log = Log(log_type='traceback', source=source, payload=trace,
                       company_id=self.company_id)
        session.add(trac_log)
        session.commit()
        if telemtry_enabled:
            add_breadcrumb(
                category='stack_trace',
                message=trace,
                level='info',
            )
            if log_type in ['ERROR']:
                capture_message(str(payload))
            if log_type in ['WARNING']:
                capture_message(str(payload))

    log = Log(log_type=str(log_type), source=source, payload=str(payload),
              company_id=self.company_id)
    session.add(log)
    session.commit()

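# Usage sketch (not part of the original listing): how a handler like the one above
# might be attached to a logger. The class name `DbLogHandler` and its constructor
# signature are assumptions made for illustration only.
import logging

logger = logging.getLogger('mindsdb-logger')
logger.addHandler(DbLogHandler(company_id=1))  # hypothetical handler whose emit() is shown above
logger.warning('disk usage is high')  # persisted as a Log row; WARNING/ERROR also get a traceback row
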
def _save(self):
    self._db_config = _null_to_empty(self._db_config)

    config_record = session.query(Configuration).filter_by(
        company_id=self.company_id).first()
    if config_record is not None:
        config_record.data = json.dumps(self._db_config)
    else:
        config_record = Configuration(company_id=self.company_id,
                                      data=json.dumps(self._db_config))
        session.add(config_record)

    session.commit()

def _setup_for_creation(self, name):
    if name in self.predictor_cache:
        del self.predictor_cache[name]
    # Here for no particular reason, because we want to run this sometimes but not too often
    self._invalidate_cached_predictors()

    predictor_dir = Path(self.config.paths['predictors']).joinpath(name)
    create_directory(predictor_dir)

    predictor_record = Predictor(company_id=self.company_id, name=name, is_custom=False)
    session.add(predictor_record)
    session.commit()

def store_stream(self, stream):
    """Stores a created stream."""
    stream_rec = Stream(name=stream.stream_name,
                        connection_params=self.connection_info,
                        advanced_params=self.advanced_info,
                        _type=stream._type,
                        predictor=stream.predictor,
                        integration=self.name,
                        company_id=self.company_id,
                        stream_in=stream.stream_in_name,
                        stream_out=stream.stream_out_name)
    session.add(stream_rec)
    session.commit()
    self.streams[stream.stream_name] = stream.stop_event

def store_stream(self, stream):
    """Stores a created stream."""
    stream_name = f"{self.name}_{stream.predictor}"
    stream_rec = Stream(name=stream_name,
                        host=stream.host,
                        port=stream.port,
                        db=stream.db,
                        _type=stream._type,
                        predictor=stream.predictor,
                        integration=self.name,
                        company_id=self.company_id,
                        stream_in=stream.stream_in_name,
                        stream_out=stream.stream_out_name)
    session.add(stream_rec)
    session.commit()
    self.streams[stream_name] = stream.stop_event

def _lock_predictor(self, id, mode='write'):
    from mindsdb.interfaces.storage.db import session, Semaphor

    while True:
        semaphor_record = session.query(Semaphor).filter_by(
            company_id=self.company_id, entity_id=id, entity_type='predictor').first()
        if semaphor_record is not None:
            # Concurrent reads are allowed; anything else must wait.
            if mode == 'read' and semaphor_record.action == 'read':
                return True
        try:
            # The insert is expected to fail (e.g. via a uniqueness constraint on the
            # semaphore table) while another process still holds the lock.
            semaphor_record = Semaphor(company_id=self.company_id, entity_id=id,
                                       entity_type='predictor', action=mode)
            session.add(semaphor_record)
            session.commit()
            return True
        except Exception:
            pass
        time.sleep(1)

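# Companion sketch (an assumption, not from the original listing): releasing the lock
# taken by _lock_predictor by deleting the corresponding Semaphor record, mirroring the
# semaphore cleanup seen in start_analysis above. The name `_unlock_predictor` is hypothetical.
def _unlock_predictor(self, id):
    from mindsdb.interfaces.storage.db import session, Semaphor

    semaphor_record = session.query(Semaphor).filter_by(
        company_id=self.company_id, entity_id=id, entity_type='predictor').first()
    if semaphor_record is not None:
        session.delete(semaphor_record)
        session.commit()
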
def put(self, name):
    params = request.json.get('params')
    if not isinstance(params, dict):
        abort(400, "type of 'params' must be dict")

    for param in ["predictor", "stream_in", "stream_out", "integration_name"]:
        if param not in params:
            abort(400, f"'{param}' is missing.")

    integration_name = params['integration_name']
    integration_info = get_integration(integration_name)
    if not integration_info:
        abort(400, f"integration '{integration_name}' doesn't exist.")
    if integration_info["type"] not in ['redis', 'kafka']:
        abort(
            400,
            f"only an integration of type redis or kafka can be used to create streams. got: '{integration_info['type']}' type"
        )

    connection_params = params.get('connect', {})
    advanced_params = params.get('advanced', {})
    predictor = params['predictor']
    stream_in = params['stream_in']
    stream_out = params['stream_out']
    _type = params.get('type', 'forecast')

    if _type.lower() == StreamTypes.timeseries:
        ts_params = params.get('ts_params')
    else:
        ts_params = {}

    if predictor not in get_predictors():
        abort(400, f"requested predictor '{predictor}' is not ready or doesn't exist")

    stream = StreamDB(_type=_type,
                      name=name,
                      connection_params=connection_params,
                      advanced_params=advanced_params,
                      predictor=predictor,
                      stream_in=stream_in,
                      stream_out=stream_out,
                      integration=integration_name,
                      company_id=COMPANY_ID,
                      ts_params=ts_params)

    session.add(stream)
    session.commit()
    return {"status": "success", "stream_name": name}, 200

def save_file(self, name, file_path, file_name=None, company_id=None):
    """ Save the file to our store

        Args:
            name (str): name under which the file will be available in the SQL API
            file_name (str): file name
            file_path (str): path to the file
            company_id (int): company id

        Returns:
            int: id of the 'file' record in the db
    """
    if file_name is None:
        file_name = Path(file_path).name

    try:
        ds_meta_dir = Path(self.dir).joinpath(f'{company_id}@@@@@{name}')
        ds_meta_dir.mkdir()

        source = ds_meta_dir.joinpath(file_name)
        shutil.move(file_path, str(source))

        ds = FileDS(str(source))
        ds_meta = self._get_ds_meta(ds)

        column_names = ds_meta['column_names']
        if ds_meta['column_names'] is not None:
            column_names = json.dumps([dict(name=x) for x in ds_meta['column_names']])

        file_record = File(name=name,
                           company_id=company_id,
                           source_file_path=file_name,
                           file_path=str(source),
                           row_count=ds_meta['row_count'],
                           columns=column_names)
        session.add(file_record)
        session.commit()
        self.fs_store.put(f'{company_id}@@@@@{name}', f'file_{company_id}_{file_record.id}', self.dir)
    except Exception as e:
        log.error(e)
        shutil.rmtree(ds_meta_dir)
        raise

    return file_record.id

def put(self, name):
    params = request.json.get('params')
    if not isinstance(params, dict):
        abort(400, "type of 'params' must be dict")

    for param in ["host", "port", "predictor", "stream_in", "stream_out", "integration_name"]:
        if param not in params:
            abort(400, f"'{param}' is missing.")

    integration_name = params['integration_name']
    integration_info = get_integration(integration_name)
    if not integration_info:
        abort(400, f"integration '{integration_name}' doesn't exist.")
    if integration_info['type'] != 'redis':
        abort(
            400,
            f"only an integration of redis type can be used to create redis streams. got: '{integration_info['type']}' type"
        )

    host = integration_info['host']
    port = integration_info['port']
    db = integration_info.get('db', 0)

    predictor = params['predictor']
    stream_in = params['stream_in']
    stream_out = params['stream_out']
    _type = params.get('type', 'forecast')

    if predictor not in get_predictors():
        abort(400, f"requested predictor '{predictor}' is not ready or doesn't exist")

    stream = StreamDB(_type=_type,
                      name=name,
                      host=host,
                      port=port,
                      db=db,
                      predictor=predictor,
                      stream_in=stream_in,
                      stream_out=stream_out,
                      integration=integration_name,
                      company_id=COMPANY_ID)

    session.add(stream)
    session.commit()
    return {"status": "success", "stream_name": name}, 200

def add(self, name, query, integration_name, company_id=None):
    integration_records = session.query(Integration).filter_by(
        company_id=company_id).all()
    integration_id = None
    for record in integration_records:
        if record.name == integration_name:
            integration_id = record.id
            break
    else:
        # for/else: no matching integration was found
        raise Exception(f"Can't find integration with name: {integration_name}")

    view_record = View(name=name, company_id=company_id, query=query,
                       integration_id=integration_id)
    session.add(view_record)
    session.commit()

def start_analysis(self, name, company_id=None):
    dataset_record = session.query(Dataset).filter_by(
        company_id=company_id, name=name).first()
    if dataset_record.analysis_id is not None:
        return None

    semaphor_record = session.query(Semaphor).filter_by(
        company_id=company_id, entity_id=dataset_record.id,
        entity_type='dataset').first()
    if semaphor_record is None:
        semaphor_record = Semaphor(company_id=company_id,
                                   entity_id=dataset_record.id,
                                   entity_type='dataset', action='write')
        session.add(semaphor_record)
        session.commit()
    else:
        return

    try:
        analysis = self.model_interface.analyse_dataset(
            ds=self.get_datasource_obj(name, raw=True, company_id=company_id),
            company_id=company_id)
        dataset_record = session.query(Dataset).filter_by(
            company_id=company_id, name=name).first()
        analysis_record = Analysis(
            analysis=json.dumps(analysis, cls=CustomJSONEncoder))
        session.add(analysis_record)
        session.flush()
        dataset_record.analysis_id = analysis_record.id
        session.commit()
    except Exception as e:
        log.error(e)
    finally:
        semaphor_record = session.query(Semaphor).filter_by(
            company_id=company_id, entity_id=dataset_record.id,
            entity_type='dataset').first()
        session.delete(semaphor_record)
        session.commit()

def _setup_for_creation(self, name):
    from mindsdb_datasources import FileDS, ClickhouseDS, MariaDS, MySqlDS, PostgresDS, MSSQLDS, MongoDS, SnowflakeDS, AthenaDS
    import mindsdb_native
    from mindsdb_native import F
    from mindsdb_native.libs.constants.mindsdb import DATA_SUBTYPES
    from mindsdb.interfaces.storage.db import session, Predictor

    if name in self.predictor_cache:
        del self.predictor_cache[name]
    # Here for no particular reason, because we want to run this sometimes but not too often
    self._invalidate_cached_predictors()

    predictor_dir = Path(self.config.paths['predictors']).joinpath(name)
    create_directory(predictor_dir)

    predictor_record = Predictor(company_id=self.company_id, name=name, is_custom=False)
    session.add(predictor_record)
    session.commit()

def save_datasource(self, name, source_type, source, file_path=None):
    datasource_record = Datasource(company_id=self.company_id, name=name)

    if source_type == 'file' and (file_path is None):
        raise Exception('`file_path` argument required when source_type == "file"')

    ds_meta_dir = os.path.join(self.dir, name)
    os.mkdir(ds_meta_dir)

    session.add(datasource_record)
    session.commit()
    datasource_record = session.query(Datasource).filter_by(
        company_id=self.company_id, name=name).first()

    try:
        if source_type == 'file':
            source = os.path.join(ds_meta_dir, source)
            shutil.move(file_path, source)
            ds = FileDS(source)
            creation_info = {
                'class': 'FileDS',
                'args': [source],
                'kwargs': {}
            }
        elif source_type in self.config['integrations']:
            integration = self.config['integrations'][source_type]

            ds_class_map = {
                'clickhouse': ClickhouseDS,
                'mariadb': MariaDS,
                'mysql': MySqlDS,
                'postgres': PostgresDS,
                'mssql': MSSQLDS,
                'mongodb': MongoDS,
                'snowflake': SnowflakeDS
            }

            try:
                dsClass = ds_class_map[integration['type']]
            except KeyError:
                raise KeyError(f"Unknown DS type: {source_type}, type is {integration['type']}")

            if integration['type'] in ['clickhouse']:
                creation_info = {
                    'class': dsClass.__name__,
                    'args': [],
                    'kwargs': {
                        'query': source['query'],
                        'user': integration['user'],
                        'password': integration['password'],
                        'host': integration['host'],
                        'port': integration['port']
                    }
                }
                ds = dsClass(**creation_info['kwargs'])

            elif integration['type'] in ['mssql', 'postgres', 'mariadb', 'mysql']:
                creation_info = {
                    'class': dsClass.__name__,
                    'args': [],
                    'kwargs': {
                        'query': source['query'],
                        'user': integration['user'],
                        'password': integration['password'],
                        'host': integration['host'],
                        'port': integration['port']
                    }
                }

                if 'database' in integration:
                    creation_info['kwargs']['database'] = integration['database']

                if 'database' in source:
                    creation_info['kwargs']['database'] = source['database']

                ds = dsClass(**creation_info['kwargs'])

            elif integration['type'] == 'snowflake':
                creation_info = {
                    'class': dsClass.__name__,
                    'args': [],
                    'kwargs': {
                        'query': source['query'],
                        'schema': source['schema'],
                        'warehouse': source['warehouse'],
                        'database': source['database'],
                        'host': integration['host'],
                        'password': integration['password'],
                        'user': integration['user'],
                        'account': integration['account']
                    }
                }
                ds = dsClass(**creation_info['kwargs'])

            elif integration['type'] == 'mongodb':
                if isinstance(source['find'], str):
                    source['find'] = json.loads(source['find'])

                creation_info = {
                    'class': dsClass.__name__,
                    'args': [],
                    'kwargs': {
                        'database': source['database'],
                        'collection': source['collection'],
                        'query': source['find'],
                        'user': integration['user'],
                        'password': integration['password'],
                        'host': integration['host'],
                        'port': integration['port']
                    }
                }
                ds = dsClass(**creation_info['kwargs'])
        else:
            # This probably only happens for urls
            ds = FileDS(source)
            creation_info = {
                'class': 'FileDS',
                'args': [source],
                'kwargs': {}
            }

        df = ds.df

        if '' in df.columns or len(df.columns) != len(set(df.columns)):
            shutil.rmtree(ds_meta_dir)
            raise Exception('Each column in datasource must have unique non-empty name')

        datasource_record.creation_info = json.dumps(creation_info)
        datasource_record.data = json.dumps({
            'source_type': source_type,
            'source': source,
            'row_count': len(df),
            'columns': [dict(name=x) for x in list(df.keys())]
        })

        self.fs_store.put(name, f'datasource_{self.company_id}_{datasource_record.id}', self.dir)
    except Exception:
        if os.path.isdir(ds_meta_dir):
            shutil.rmtree(ds_meta_dir)
        raise

    session.commit()
    return self.get_datasource_obj(name, raw=True), name

def add_db_integration(name, data, company_id):
    if 'database_name' not in data:
        data['database_name'] = name
    if 'publish' not in data:
        data['publish'] = True

    bundle_path = data.get('secure_connect_bundle')
    if data.get('type') in ('cassandra', 'scylla') and _is_not_empty_str(bundle_path):
        if os.path.isfile(bundle_path) is False:
            raise Exception(f'Can not get access to file: {bundle_path}')
        integrations_dir = Config()['paths']['integrations']

        p = Path(bundle_path)
        data['secure_connect_bundle'] = p.name

        integration_record = Integration(name=name, data=data, company_id=company_id)
        session.add(integration_record)
        session.commit()
        integration_id = integration_record.id

        folder_name = f'integration_files_{company_id}_{integration_id}'
        integration_dir = os.path.join(integrations_dir, folder_name)
        create_directory(integration_dir)
        shutil.copyfile(bundle_path, os.path.join(integration_dir, p.name))

        FsStore().put(folder_name, integration_dir, integrations_dir)
    elif data.get('type') in ('mysql', 'mariadb'):
        ssl = data.get('ssl')
        files = {}
        temp_dir = None
        if ssl is True:
            for key in ['ssl_ca', 'ssl_cert', 'ssl_key']:
                if key not in data:
                    continue
                if os.path.isfile(data[key]) is False:
                    if _is_not_empty_str(data[key]) is False:
                        raise Exception(
                            "'ssl_ca', 'ssl_cert' and 'ssl_key' must be paths or inline certs"
                        )
                    if temp_dir is None:
                        temp_dir = tempfile.mkdtemp(prefix='integration_files_')
                    cert_file_name = data.get(f'{key}_name', f'{key}.pem')
                    cert_file_path = os.path.join(temp_dir, cert_file_name)
                    with open(cert_file_path, 'wt') as f:
                        f.write(data[key])
                    data[key] = cert_file_path
                files[key] = data[key]
                p = Path(data[key])
                data[key] = p.name

        integration_record = Integration(name=name, data=data, company_id=company_id)
        session.add(integration_record)
        session.commit()
        integration_id = integration_record.id

        if len(files) > 0:
            integrations_dir = Config()['paths']['integrations']
            folder_name = f'integration_files_{company_id}_{integration_id}'
            integration_dir = os.path.join(integrations_dir, folder_name)
            create_directory(integration_dir)
            for file_path in files.values():
                p = Path(file_path)
                shutil.copyfile(file_path, os.path.join(integration_dir, p.name))

            FsStore().put(folder_name, integration_dir, integrations_dir)
    else:
        integration_record = Integration(name=name, data=data, company_id=company_id)
        session.add(integration_record)
        session.commit()