def __init__(self):
    self.config = Config()
    self.fs_store = FsSotre()
    self.company_id = os.environ.get('MINDSDB_COMPANY_ID', None)
    self.dir = self.config.paths['datasources']
    self.mindsdb_native = NativeInterface()
def __init__(self, connection_info, advanced_info, topic_in, topic_out, predictor, _type):
    self.connection_info = connection_info
    self.advanced_info = advanced_info
    self.predictor = predictor
    self.stream_in_name = topic_in
    self.stream_out_name = topic_out

    self.consumer = kafka.KafkaConsumer(**self.connection_info, **self.advanced_info.get('consumer', {}))
    self.consumer.subscribe(topics=[self.stream_in_name])

    self.producer = kafka.KafkaProducer(**self.connection_info, **self.advanced_info.get('producer', {}))

    self.admin = kafka.KafkaAdminClient(**self.connection_info)
    try:
        self.topic = NewTopic(self.stream_out_name, num_partitions=1, replication_factor=1)
        self.admin.create_topics([self.topic])
    except kafka.errors.TopicAlreadyExistsError:
        pass

    self._type = _type
    self.native_interface = NativeInterface()
    self.format_flag = 'explain'
    self.stop_event = Event()
    self.company_id = os.environ.get('MINDSDB_COMPANY_ID', None)
    self.caches = {}

    if self._type == 'timeseries':
        super().__init__(target=KafkaStream.make_timeseries_predictions, args=(self,))
    else:
        super().__init__(target=KafkaStream.make_prediction, args=(self,))
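# Usage sketch (not from the source): the super().__init__(target=...) calls above imply
# that KafkaStream subclasses threading.Thread, so a constructed stream is started with
# .start() and signalled to stop via its stop_event. The broker address, topic names and
# predictor name below are hypothetical placeholders.
# from ... import KafkaStream  # exact module path is not shown in the source

stream = KafkaStream(
    connection_info={'bootstrap_servers': 'localhost:9092'},
    advanced_info={'consumer': {}, 'producer': {}},
    topic_in='predictions_in',
    topic_out='predictions_out',
    predictor='my_predictor',
    _type='timeseries',
)
stream.start()            # runs make_timeseries_predictions in a background thread
# ...later, to shut the worker down:
stream.stop_event.set()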
def initialize_interfaces(app):
    app.default_store = DataStore()
    app.naitve_interface = NativeInterface()
    app.custom_models = CustomModels()
    app.dbw = DatabaseWrapper()
    config = Config()
    app.config_obj = config
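# Usage sketch (assumption): `app` looks like a Flask application object here, but the
# function only attaches shared interface instances as attributes, so any attribute-bearing
# object would work; Flask itself is an assumption, not confirmed by this snippet.
from flask import Flask

app = Flask(__name__)
initialize_interfaces(app)
# Request handlers can now reach app.default_store, app.naitve_interface,
# app.custom_models, app.dbw and app.config_obj.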
def __init__(self):
    self.config = Config()
    self.fs_store = FsSotre()
    self.company_id = os.environ.get('MINDSDB_COMPANY_ID', None)
    self.dbw = DatabaseWrapper()
    self.storage_dir = self.config['paths']['custom_models']
    os.makedirs(self.storage_dir, exist_ok=True)
    self.model_cache = {}
    self.mindsdb_native = NativeInterface()
def __init__(self, host, port, database, stream_in, stream_out, predictor, _type):
    self.host = host
    self.port = port
    self.db = database
    self.predictor = predictor
    self.client = self._get_client()
    self.stream_in_name = stream_in
    self.stream_out_name = stream_out
    self.stream_in = self.client.Stream(stream_in)
    self.stream_out = self.client.Stream(stream_out)
    self._type = _type
    self.native_interface = NativeInterface()
    self.format_flag = 'explain'
    self.stop_event = Event()
    self.company_id = os.environ.get('MINDSDB_COMPANY_ID', None)
    super().__init__(target=RedisStream.make_prediction, args=(self,))
def __init__(self, config):
    mongodb_config = config['api'].get('mongodb')
    assert mongodb_config is not None, 'There is no mongodb config!'
    host = mongodb_config['host']
    port = mongodb_config['port']
    log.debug(f'start mongo server on {host}:{port}')

    super().__init__((host, int(port)), MongoRequestHandler)

    self.mindsdb_env = {
        'config': config,
        'data_store': DataStore(),
        'mindsdb_native': NativeInterface()
    }

    respondersCollection = RespondersCollection()

    opQueryResponder = OpQueryResponder(respondersCollection)
    opMsgResponder = OpMsgResponder(respondersCollection)
    opInsertResponder = OpInsertResponder(respondersCollection)

    self.operationsHandlersMap = {
        OP_QUERY: opQueryResponder,
        OP_MSG: opMsgResponder,
        OP_INSERT: opInsertResponder
    }

    respondersCollection.add(when={'drop': 'system.sessions'}, result={'ok': 1})
    respondersCollection.add(when={'update': 'system.version'}, result={'ok': 1})
    respondersCollection.add(
        when={'setFeatureCompatibilityVersion': helpers.is_true},
        result={'ok': 1}
    )
    # OpMSG=OrderedDict([('features', 1), ('$clusterTime', OrderedDict([('clusterTime', Timestamp(1599748325, 1)), ('signature', OrderedDict([('hash', b'\xb8\xc3\x03\x18\xca\xe6bh\xf0\xcb47,\x924\x8a >\xfc\x91'), ('keyId', 6870854312365391875)]))])), ('$configServerState', OrderedDict([('opTime', OrderedDict([('ts', Timestamp(1599748325, 1)), ('t', 1)]))])), ('$db', 'admin')])
    respondersCollection.add(when={'features': helpers.is_true}, result={'ok': 1})
    # OpMSG=OrderedDict([('serverStatus', 1), ('$clusterTime', OrderedDict([('clusterTime', Timestamp(1599748366, 1)), ('signature', OrderedDict([('hash', b'\xa1E}\xbbIU\xc2D\x95++\x82\x88\xb5\x84\xf5\xda)+B'), ('keyId', 6870854312365391875)]))])), ('$configServerState', OrderedDict([('opTime', OrderedDict([('ts', Timestamp(1599748366, 1)), ('t', 1)]))])), ('$db', 'admin')])
    respondersCollection.add(when={'serverStatus': helpers.is_true}, result={'ok': 1})
    # OpMSG=OrderedDict([('ismaster', 1), ('$db', 'admin'), ('$clusterTime', OrderedDict([('clusterTime', Timestamp(1599749031, 1)), ('signature', OrderedDict([('hash', b'6\x87\xd5Y\xa7\xc7\xcf$\xab\x1e\xa2{\xe5B\xe5\x99\xdbl\x8d\xf4'), ('keyId', 6870854312365391875)]))])), ('$client', OrderedDict([('application', OrderedDict([('name', 'MongoDB Shell')])), ('driver', OrderedDict([('name', 'MongoDB Internal Client'), ('version', '3.6.3')])), ('os', OrderedDict([('type', 'Linux'), ('name', 'Ubuntu'), ('architecture', 'x86_64'), ('version', '18.04')])), ('mongos', OrderedDict([('host', 'maxs-comp:27103'), ('client', '127.0.0.1:52148'), ('version', '3.6.3')]))])), ('$configServerState', OrderedDict([('opTime', OrderedDict([('ts', Timestamp(1599749031, 1)), ('t', 1)]))]))])

    respondersCollection.responders += responders
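# Usage sketch (assumptions): the super().__init__((host, int(port)), MongoRequestHandler)
# call suggests a socketserver-style base class, so serve_forever() should start the
# listener. The class name MongoServer, the port number and the minimal config shape below
# are illustrative guesses; the code above only requires config['api']['mongodb'] to
# provide 'host' and 'port'.
config = {'api': {'mongodb': {'host': '127.0.0.1', 'port': 47336}}}
server = MongoServer(config)
server.serve_forever()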
def __init__(self, name, connection_info, advanced_info, stream_in, stream_out, predictor, _type):
    self.stream_name = name
    self.connection_info = connection_info
    self.connection_info.update(advanced_info)
    self.predictor = predictor
    self.client = self._get_client()
    self.stream_in_name = stream_in
    self.stream_out_name = stream_out
    self.stream_in = self.client.Stream(stream_in)
    self.stream_out = self.client.Stream(stream_out)
    self._type = _type
    self.native_interface = NativeInterface()
    self.format_flag = 'explain'
    self.stop_event = Event()
    self.company_id = os.environ.get('MINDSDB_COMPANY_ID', None)

    if self._type == 'timeseries':
        super().__init__(target=RedisStream.make_timeseries_predictions, args=(self,))
    else:
        super().__init__(target=RedisStream.make_predictions, args=(self,))
def __init__(self, name, connection_info, advanced_info, stream_in, stream_out, predictor, _type, **ts_params):
    self.stream_name = name
    self.connection_info = connection_info
    self.connection_info.update(advanced_info)
    self.predictor = predictor
    self.client = self._get_client()
    self.stream_in_name = stream_in
    self.stream_out_name = stream_out
    self.stream_in = self.client.Stream(stream_in)
    self.stream_out = self.client.Stream(stream_out)
    self._type = _type
    self.native_interface = NativeInterface()
    self.format_flag = 'explain'
    self.stop_event = Event()
    self.company_id = os.environ.get('MINDSDB_COMPANY_ID', None)
    self.ts_params = ts_params

    if self._type.lower() == StreamTypes.timeseries:
        self.target = self.ts_params.get('target')
        self.window = self.ts_params.get('window_size')
        self.gb = self.ts_params.get('group_by')
        self.dt = self.ts_params.get('order_by')
        super().__init__(target=RedisStream.make_timeseries_predictions, args=(self,))
    else:
        super().__init__(target=RedisStream.make_predictions, args=(self,))
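# Usage sketch (assumptions): RedisStream appears to subclass threading.Thread, given the
# super().__init__(target=...) calls above, and self.client.Stream(...) suggests a Redis
# Streams client built by self._get_client(). The connection details, stream names,
# predictor name, ts_params values and the literal 'timeseries' (the exact value of
# StreamTypes.timeseries is not shown) are hypothetical placeholders.
stream = RedisStream(
    name='my_stream',
    connection_info={'host': 'localhost', 'port': 6379, 'db': 0},
    advanced_info={},
    stream_in='predictions_in',
    stream_out='predictions_out',
    predictor='my_predictor',
    _type='timeseries',
    target='sales',
    window_size=10,
    group_by='store_id',
    order_by='sold_at',
)
stream.start()            # worker thread reads stream_in and writes predictions to stream_out
# ...later, to shut the worker down:
stream.stop_event.set()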
apis = {
    api: {
        'port': config['api'][api]['port'],
        'process': None,
        'started': False
    } for api in api_arr
}
if not ray_based:
    apis['rcp'] = {'process': rpc_proc, 'started': True}

start_functions = {
    'http': start_http,
    'mysql': start_mysql,
    'mongodb': start_mongo
}

mdb = NativeInterface()
cst = CustomModels()
model_data_arr = get_all_models_meta_data(mdb, cst)

dbw = DatabaseWrapper()
for db_alias in config['integrations']:
    dbw.setup_integration(db_alias)
dbw.register_predictors(model_data_arr)

for broken_name in [name for name, connected in dbw.check_connections().items() if connected is False]:
    log.error(f'Failed to integrate with database aliased: {broken_name}')

for api_name, api_data in apis.items():
    if api_data['started']:
        continue
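# Illustration (assumption): with api_arr = ['http', 'mysql'] and ports read from the
# config, the comprehension above produces a structure like the following before the
# start loop runs; the port numbers are placeholders.
# apis = {
#     'http': {'port': 47334, 'process': None, 'started': False},
#     'mysql': {'port': 47335, 'process': None, 'started': False},
# }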
def __init__(self, config):
    self.config = Config()
    self.mindsdb_native = NativeInterface()
    self.custom_models = CustomModels()
    self.ai_table = AITable_store()
    self.default_store = DataStore()