def _unlock_predictor(self, id: int) -> None:
    from mindsdb.interfaces.storage.db import session, Semaphor
    semaphor_record = session.query(Semaphor).filter_by(
        entity_id=id, entity_type='predictor').first()
    if semaphor_record is not None:
        session.delete(semaphor_record)
        session.commit()

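# For contrast, a minimal sketch of the acquire side this helper undoes.
# Hypothetical: the real lock path is not shown here; this only assumes the
# same Semaphor model and the delete-to-release convention above.
def _lock_predictor(self, id: int, action: str = 'write') -> bool:
    from mindsdb.interfaces.storage.db import session, Semaphor
    semaphor_record = session.query(Semaphor).filter_by(
        entity_id=id, entity_type='predictor').first()
    if semaphor_record is not None:
        # Another process already holds the lock.
        return False
    session.add(Semaphor(entity_id=id, entity_type='predictor', action=action))
    session.commit()
    return True
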
def start_analysis(self, name):
    datasource_record = session.query(Datasource).filter_by(
        company_id=self.company_id, name=name).first()
    if datasource_record.analysis is not None:
        # Analysis already exists; nothing to do.
        return None
    semaphor_record = session.query(Semaphor).filter_by(
        company_id=self.company_id, entity_id=datasource_record.id,
        entity_type='datasource').first()
    if semaphor_record is None:
        # Take a write lock so concurrent callers don't analyse the same
        # datasource twice.
        semaphor_record = Semaphor(
            company_id=self.company_id, entity_id=datasource_record.id,
            entity_type='datasource', action='write')
        session.add(semaphor_record)
        session.commit()
    else:
        # Another process holds the lock; let it finish the analysis.
        return
    try:
        analysis = self.mindsdb_native.analyse_dataset(
            self.get_datasource_obj(name, raw=True))
        # Re-fetch the record: the session state may have changed during
        # the long-running analysis.
        datasource_record = session.query(Datasource).filter_by(
            company_id=self.company_id, name=name).first()
        datasource_record.analysis = json.dumps(analysis)
        session.commit()
    except Exception as e:
        log.error(e)
    finally:
        # Always release the lock, even if the analysis failed.
        semaphor_record = session.query(Semaphor).filter_by(
            company_id=self.company_id, entity_id=datasource_record.id,
            entity_type='datasource').first()
        session.delete(semaphor_record)
        session.commit()

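# A minimal read-side sketch for the analysis written above. Hypothetical
# helper (not part of the original code), assuming the analysis lives as a
# JSON string on the Datasource row, exactly as start_analysis stores it:
def get_analysis(self, name):
    datasource_record = session.query(Datasource).filter_by(
        company_id=self.company_id, name=name).first()
    if datasource_record is None or datasource_record.analysis is None:
        return None
    return json.loads(datasource_record.analysis)
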
def delete_model(self, name):
    from mindsdb_native import F
    predictor_record = Predictor.query.filter_by(
        company_id=self.company_id, name=name, is_custom=False).first()
    id = predictor_record.id
    session.delete(predictor_record)
    session.commit()
    F.delete_model(name)
    self.dbw.unregister_predictor(name)
    self.fs_store.delete(f'predictor_{self.company_id}_{id}')

def delete_file(self, name, company_id):
    file_record = session.query(File).filter_by(
        company_id=company_id, name=name).first()
    if file_record is None:
        return None
    file_id = file_record.id
    session.delete(file_record)
    session.commit()
    self.fs_store.delete(f'file_{company_id}_{file_id}')
    return True

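# Hypothetical caller sketch: the return value separates "no such file"
# (None) from a completed delete (True), so callers can branch on it.
# file_controller is an assumed instance of the class above.
# result = file_controller.delete_file('sales.csv', company_id=1)
# if result is None:
#     log.warning("file 'sales.csv' not found; nothing was deleted")
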
def delete_datasource(self, name):
    datasource_record = Datasource.query.filter_by(
        company_id=self.company_id, name=name).first()
    id = datasource_record.id
    session.delete(datasource_record)
    session.commit()
    # Use the id captured before the delete: the ORM instance is expired
    # once the transaction commits.
    self.fs_store.delete(f'datasource_{self.company_id}_{id}')
    try:
        shutil.rmtree(os.path.join(self.dir, name))
    except Exception:
        pass

def delete_datasource(self, name, company_id=None):
    datasource_record = Datasource.query.filter_by(
        company_id=company_id, name=name).first()
    if not Config()["force_datasource_removing"]:
        linked_models = Predictor.query.filter_by(
            company_id=company_id, datasource_id=datasource_record.id).all()
        if linked_models:
            raise Exception(
                "Can't delete the '{}' datasource because the following models are linked to it: {}".format(
                    name, [model.name for model in linked_models]))
    datasource_id = datasource_record.id
    session.delete(datasource_record)
    session.commit()
    # Use the id captured before the delete: the ORM instance is expired
    # once the transaction commits.
    self.fs_store.delete(f'datasource_{company_id}_{datasource_id}')
    try:
        shutil.rmtree(os.path.join(self.dir, f'{company_id}@@@@@{name}'))
    except Exception:
        pass

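# Hypothetical caller sketch: the linked-model guard surfaces as a plain
# Exception, so deletion attempts are wrapped accordingly
# (datasource_controller is an assumed instance of the class above).
# try:
#     datasource_controller.delete_datasource('sales_data', company_id=1)
# except Exception as e:
#     log.error(f'delete refused: {e}')  # message lists the linked models
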
def delete_model(self, name):
    from mindsdb_native import F
    from mindsdb.interfaces.storage.db import session, Predictor
    predictor_record = Predictor.query.filter_by(
        company_id=self.company_id, name=name, is_custom=False).first()
    id = predictor_record.id
    session.delete(predictor_record)
    session.commit()
    F.delete_model(name)
    self.dbw.unregister_predictor(name)
    self.fs_store.delete(f'predictor_{self.company_id}_{id}')
    return 0

def remove_db_integration(name, company_id):
    integration_record = session.query(Integration).filter_by(
        company_id=company_id, name=name).first()
    integrations_dir = Config()['paths']['integrations']
    folder_name = f'integration_files_{company_id}_{integration_record.id}'
    integration_dir = os.path.join(integrations_dir, folder_name)
    if os.path.isdir(integration_dir):
        shutil.rmtree(integration_dir)
    try:
        FsStore().delete(folder_name)
    except Exception:
        pass
    session.delete(integration_record)
    session.commit()

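# Usage note (sketch): removal is forgiving on the storage side -- a missing
# local directory is skipped and FsStore errors are swallowed -- but the
# function assumes the Integration row exists; an unknown name raises
# AttributeError on integration_record.id.
# remove_db_integration('my_postgres', company_id=1)
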
def start_analysis(self, name, company_id=None):
    dataset_record = session.query(Dataset).filter_by(
        company_id=company_id, name=name).first()
    if dataset_record.analysis_id is not None:
        return None
    semaphor_record = session.query(Semaphor).filter_by(
        company_id=company_id, entity_id=dataset_record.id,
        entity_type='dataset').first()
    if semaphor_record is None:
        semaphor_record = Semaphor(
            company_id=company_id, entity_id=dataset_record.id,
            entity_type='dataset', action='write')
        session.add(semaphor_record)
        session.commit()
    else:
        return
    try:
        analysis = self.model_interface.analyse_dataset(
            ds=self.get_datasource_obj(name, raw=True, company_id=company_id),
            company_id=company_id)
        dataset_record = session.query(Dataset).filter_by(
            company_id=company_id, name=name).first()
        analysis_record = Analysis(
            analysis=json.dumps(analysis, cls=CustomJSONEncoder))
        session.add(analysis_record)
        # flush() assigns analysis_record.id without ending the transaction,
        # so the foreign key can be set before the single commit below.
        session.flush()
        dataset_record.analysis_id = analysis_record.id
        session.commit()
    except Exception as e:
        log.error(e)
    finally:
        semaphor_record = session.query(Semaphor).filter_by(
            company_id=company_id, entity_id=dataset_record.id,
            entity_type='dataset').first()
        session.delete(semaphor_record)
        session.commit()

def run_learn(df: DataFrame, problem_definition: ProblemDefinition,
              predictor_id: int, delete_ds_on_fail: bool = False) -> None:
    try:
        run_generate(df, problem_definition, predictor_id)
        run_fit(predictor_id, df)
    except Exception as e:
        predictor_record = Predictor.query.with_for_update().get(predictor_id)
        if delete_ds_on_fail is True:
            linked_db_ds = Datasource.query.filter_by(
                id=predictor_record.datasource_id).first()
            if linked_db_ds is not None:
                # Only delete the datasource if no other predictor uses it.
                predictors_with_ds = Predictor.query.filter(
                    (Predictor.id != predictor_id)
                    & (Predictor.datasource_id == linked_db_ds.id)).all()
                if len(predictors_with_ds) == 0:
                    session.delete(linked_db_ds)
                    predictor_record.datasource_id = None
        predictor_record.data = {"error": str(e)}
        session.commit()
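
# Invocation sketch, with assumed inputs: ProblemDefinition here follows the
# lightwood-style API (construction details hypothetical).
# from lightwood.api.types import ProblemDefinition
# pdef = ProblemDefinition.from_dict({'target': 'price'})
# run_learn(df, pdef, predictor_id=42, delete_ds_on_fail=True)
# On failure, the predictor row's `data` carries {'error': ...} and the
# linked datasource is removed only when no other predictor references it.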