def generate_predictor(self, name: str, from_data: dict, datasource_id,
                       problem_definition_dict: dict, join_learn_process: bool,
                       company_id: int):
    # Predictor names are unique per company.
    predictor_record = db.session.query(db.Predictor).filter_by(
        company_id=company_id, name=name).first()
    if predictor_record is not None:
        raise Exception('Predictor name must be unique.')

    df, problem_definition, _ = self._unpack_old_args(from_data, problem_definition_dict)
    problem_definition = ProblemDefinition.from_dict(problem_definition)

    # Persist the record first so the generation process can reference it by id.
    predictor_record = db.Predictor(
        company_id=company_id,
        name=name,
        datasource_id=datasource_id,
        mindsdb_version=mindsdb_version,
        lightwood_version=lightwood_version,
        to_predict=problem_definition.target,
        learn_args=problem_definition.to_dict(),
        data={'name': name}
    )
    db.session.add(predictor_record)
    db.session.commit()
    predictor_id = predictor_record.id

    # Generate JSON-AI and code in a separate process, optionally waiting for it.
    p = GenerateProcess(df, problem_definition, predictor_id)
    p.start()
    if join_learn_process:
        p.join()
        if not IS_PY36:
            p.close()

    db.session.refresh(predictor_record)
def learn(self, name: str, from_data: dict, to_predict: str, datasource_id: int,
          kwargs: dict, company_id: int) -> None:
    df, problem_definition, join_learn_process = self._unpack_old_args(
        from_data, kwargs, to_predict)

    p = LearnProcess(df, ProblemDefinition.from_dict(problem_definition),
                     name, company_id, datasource_id)
    p.start()
    if join_learn_process:
        p.join()
        if not IS_PY36:
            p.close()
def learn(self, name: str, from_data: dict, to_predict: str, datasource_id: int,
          kwargs: dict, company_id: int, delete_ds_on_fail: Optional[bool] = False) -> None:
    # Predictor names are unique per company.
    predictor_record = db.session.query(db.Predictor).filter_by(
        company_id=company_id, name=name).first()
    if predictor_record is not None:
        raise Exception('Predictor name must be unique.')

    df, problem_definition, join_learn_process = self._unpack_old_args(from_data, kwargs, to_predict)
    problem_definition = ProblemDefinition.from_dict(problem_definition)

    # Persist the record first so the learn process can reference it by id.
    predictor_record = db.Predictor(
        company_id=company_id,
        name=name,
        datasource_id=datasource_id,
        mindsdb_version=mindsdb_version,
        lightwood_version=lightwood_version,
        to_predict=problem_definition.target,
        learn_args=problem_definition.to_dict(),
        data={'name': name}
    )
    db.session.add(predictor_record)
    db.session.commit()
    predictor_id = predictor_record.id

    # Train in a separate process, optionally blocking until it finishes.
    p = LearnProcess(df, problem_definition, predictor_id, delete_ds_on_fail)
    p.start()
    if join_learn_process:
        p.join()
        if not IS_PY36:
            p.close()

    db.session.refresh(predictor_record)

    # Derive a coarse status from the refreshed predictor record.
    data = {}
    if predictor_record.update_status == 'available':
        data['status'] = 'complete'
    elif predictor_record.json_ai is None and predictor_record.code is None:
        data['status'] = 'generating'
    elif predictor_record.data is None:
        data['status'] = 'editable'
    elif 'training_log' in predictor_record.data:
        data['status'] = 'training'
    elif 'error' not in predictor_record.data:
        data['status'] = 'complete'
    else:
        data['status'] = 'error'
def generate_predictor(self, name: str, from_data: dict, datasource_id,
                       problem_definition_dict: dict, join_learn_process: bool,
                       company_id: int):
    df, problem_definition, _ = self._unpack_old_args(
        from_data, problem_definition_dict)

    p = GenerateProcess(df, ProblemDefinition.from_dict(problem_definition),
                        name, company_id, datasource_id)
    p.start()
    if join_learn_process:
        p.join()
        if not IS_PY36:
            p.close()
def run_generate(df: DataFrame, problem_definition: ProblemDefinition, name: str,
                 company_id: int, datasource_id: int) -> int:
    # Build the JSON-AI specification and the generated predictor code.
    json_ai = lightwood.json_ai_from_problem(df, problem_definition)
    code = lightwood.code_from_json_ai(json_ai)

    predictor_record = db.Predictor(
        company_id=company_id,
        name=name,
        json_ai=json_ai.to_dict(),
        code=code,
        datasource_id=datasource_id,
        mindsdb_version=mindsdb_version,
        lightwood_version=lightwood_version,
        to_predict=[problem_definition.target],
        learn_args=problem_definition.to_dict(),
        data={'name': name}
    )
    db.session.add(predictor_record)
    db.session.commit()
    # Return the new record's id, matching the declared return type.
    return predictor_record.id
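# Illustrative sketch (not part of this module): how the lightwood high-level API used
# by run_generate can be exercised end to end. The CSV file and the target column name
# ('rental_price') are made-up assumptions for the example; only the lightwood calls
# themselves mirror the ones above.
import pandas as pd
import lightwood
from lightwood.api.types import ProblemDefinition

example_df = pd.read_csv('home_rentals.csv')  # hypothetical dataset
example_pdef = ProblemDefinition.from_dict({'target': 'rental_price'})

example_json_ai = lightwood.json_ai_from_problem(example_df, example_pdef)  # data + problem -> JSON-AI spec
example_code = lightwood.code_from_json_ai(example_json_ai)                 # JSON-AI spec -> predictor code
example_predictor = lightwood.predictor_from_code(example_code)             # compile code into a predictor object

example_predictor.learn(example_df)
example_predictions = example_predictor.predict(example_df)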
def learn(self, name: str, from_data: dict, to_predict: str, dataset_id: int, kwargs: dict,
          company_id: int, delete_ds_on_fail: Optional[bool] = False) -> None:
    # Predictor names are unique per company.
    predictor_record = db.session.query(db.Predictor).filter_by(company_id=company_id, name=name).first()
    if predictor_record is not None:
        raise Exception('Predictor name must be unique.')

    df, problem_definition, join_learn_process, json_ai_override = self._unpack_old_args(from_data, kwargs, to_predict)

    if 'url' in problem_definition:
        # Remote (URL-based) predictor: train/predict against external endpoints.
        train_url = problem_definition['url'].get('train', None)
        predict_url = problem_definition['url'].get('predict', None)
        com_format = problem_definition['format']

        predictor_record = db.Predictor(
            company_id=company_id,
            name=name,
            dataset_id=dataset_id,
            mindsdb_version=mindsdb_version,
            lightwood_version=lightwood_version,
            to_predict=problem_definition['target'],
            learn_args=ProblemDefinition.from_dict(problem_definition).to_dict(),
            data={'name': name, 'train_url': train_url, 'predict_url': predict_url,
                  'format': com_format,
                  'status': 'complete' if train_url is None else 'training'},
            is_custom=True,
            # @TODO: For testing purposes, remove afterwards!
            dtype_dict=json_ai_override['dtype_dict'],
        )
        db.session.add(predictor_record)
        db.session.commit()

        if train_url is not None:
            p = LearnRemoteProcess(df, predictor_record.id)
            p.start()
            if join_learn_process:
                p.join()
                if not IS_PY36:
                    p.close()
            db.session.refresh(predictor_record)
        return

    # Local predictor: persist the record, then train in a separate process.
    problem_definition = ProblemDefinition.from_dict(problem_definition)
    predictor_record = db.Predictor(
        company_id=company_id,
        name=name,
        dataset_id=dataset_id,
        mindsdb_version=mindsdb_version,
        lightwood_version=lightwood_version,
        to_predict=problem_definition.target,
        learn_args=problem_definition.to_dict(),
        data={'name': name},
    )
    db.session.add(predictor_record)
    db.session.commit()
    predictor_id = predictor_record.id

    p = LearnProcess(df, problem_definition, predictor_id, delete_ds_on_fail, json_ai_override)
    p.start()
    if join_learn_process:
        p.join()
        if not IS_PY36:
            p.close()

    db.session.refresh(predictor_record)