def _write_model_to_db(self, class_path, parameters, feature_names, model_hash,
                       trained_model, model_group_id, model_size, misc_db_parameters):
    """Writes model and feature importance data to a database

    Will overwrite the data of any previous versions
    (any existing model that shares a hash).

    If the replace flag on the object is set, the existing version of the model
    will have its non-unique attributes (e.g. timestamps) updated,
    and feature importances fully replaced.

    If the replace flag on the object is not set, the existing model metadata
    and feature importances will be used.

    Args:
        class_path (string) A full classpath to the model class
        parameters (dict) hyperparameters to give to the model constructor
        feature_names (list) feature names in order given to model
        model_hash (string) a unique id for the model
        trained_model (object) a trained model object
        model_group_id (int) the unique id for the model group
        model_size (float) the size of the stored model in kB
        misc_db_parameters (dict) params to pass through to the database
    """
    model_id = retrieve_model_id_from_hash(self.db_engine, model_hash)
    if model_id and not self.replace:
        logging.info(
            'Metadata for model_id %s found in database. Reusing model metadata.',
            model_id,
        )
        return model_id
    else:
        model = Model(
            model_hash=model_hash,
            model_type=class_path,
            hyperparameters=parameters,
            model_group_id=model_group_id,
            experiment_hash=self.experiment_hash,
            model_size=model_size,
            **misc_db_parameters,
        )
        session = self.sessionmaker()
        if model_id:
            logging.info('Found model id %s, updating non-unique attributes', model_id)
            model.model_id = model_id
            session.merge(model)
            session.commit()
        else:
            session.add(model)
            session.commit()
            model_id = model.model_id
            logging.info('Added new model id %s', model_id)
        session.close()
        logging.info('Saving feature importances for model_id %s', model_id)
        self._save_feature_importances(
            model_id, get_feature_importances(trained_model), feature_names
        )
        logging.info('Done saving feature importances for model_id %s', model_id)
        return model_id
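# Illustrative sketch (assumed caller, not part of the trainer class): shows how
# the replace flag decides between reusing an existing models-table row and
# merging over it. `trainer` and `train_kwargs` are hypothetical names; the
# kwargs are assumed to carry the keyword arguments listed in the signature
# above (class_path, parameters, feature_names, model_hash, and so on).
def _example_reuse_or_replace(trainer, train_kwargs):
    # First write inserts a new row keyed by model_hash and returns its id.
    first_id = trainer._write_model_to_db(**train_kwargs)

    # With replace unset, a second write with the same model_hash short-circuits
    # and the stored metadata and feature importances are reused as-is.
    trainer.replace = False
    assert trainer._write_model_to_db(**train_kwargs) == first_id

    # With replace set, the same model_id is kept, but non-unique attributes
    # (e.g. timestamps) are merged and feature importances are rewritten.
    trainer.replace = True
    assert trainer._write_model_to_db(**train_kwargs) == first_id
    return first_id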
def _write_model_to_db(
    self,
    class_path,
    parameters,
    feature_names,
    model_hash,
    trained_model,
    model_group_id,
    model_size,
    misc_db_parameters,
    retrain,
):
    """Writes model and feature importance data to a database

    Will overwrite the data of any previous versions
    (any existing model that shares a hash).

    If the replace flag on the object is set, the existing version of the model
    will have its non-unique attributes (e.g. timestamps) updated,
    and feature importances fully replaced.

    If the replace flag on the object is not set, the existing model metadata
    and feature importances will be used.

    Args:
        class_path (string) A full classpath to the model class
        parameters (dict) hyperparameters to give to the model constructor
        feature_names (list) feature names in order given to model
        model_hash (string) a unique id for the model
        trained_model (object) a trained model object
        model_group_id (int) the unique id for the model group
        model_size (float) the size of the stored model in kB
        misc_db_parameters (dict) params to pass through to the database
        retrain (bool) if True, skip metadata reuse and write the model
            as part of the current retrain run
    """
    model_id = retrieve_model_id_from_hash(self.db_engine, model_hash)
    if model_id and not self.replace and not retrain:
        logger.notice(
            f"Metadata for model {model_id} found in database. Reusing model metadata."
        )
        return model_id
    else:
        if retrain:
            logger.debug("Retraining model...")
            model = Model(
                model_group_id=model_group_id,
                model_hash=model_hash,
                model_type=class_path,
                hyperparameters=parameters,
                # built_by_retrain=self.experiment_hash,
                built_in_triage_run=self.run_id,
                model_size=model_size,
                **misc_db_parameters,
            )
        else:
            model = Model(
                model_hash=model_hash,
                model_type=class_path,
                hyperparameters=parameters,
                model_group_id=model_group_id,
                # built_by_experiment=self.experiment_hash,
                built_in_triage_run=self.run_id,
                model_size=model_size,
                **misc_db_parameters,
            )
        session = self.sessionmaker()
        if model_id:
            logger.notice(f"Found model {model_id}, updating non-unique attributes")
            model.model_id = model_id
            session.merge(model)
            session.commit()
        else:
            session.add(model)
            session.commit()
            model_id = model.model_id
            logger.notice(
                f"Model {model_id} not found in previous runs, added as a new model"
            )
        session.close()
        logger.spam(f"Saving feature importances for model_id {model_id}")
        self._save_feature_importances(
            model_id, get_feature_importances(trained_model), feature_names
        )
        logger.debug(f"Saved feature importances for model_id {model_id}")
        return model_id
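# Illustrative sketch (assumed caller, not part of the trainer class): in this
# variant the retrain flag bypasses metadata reuse so the model row is written
# against the current triage run (built_in_triage_run=self.run_id) even when a
# model with the same hash already exists. `trainer` and `train_kwargs` are
# hypothetical names; the kwargs are assumed to carry the keyword arguments in
# the signature above, except retrain, and a model with this hash is assumed
# to have been written already.
def _example_retrain_paths(trainer, train_kwargs):
    # Existing hash, replace unset, retrain off: the stored metadata is reused.
    trainer.replace = False
    reused_id = trainer._write_model_to_db(retrain=False, **train_kwargs)

    # replace set: the existing row is merged (same model_id) and feature
    # importances are rewritten.
    trainer.replace = True
    merged_id = trainer._write_model_to_db(retrain=False, **train_kwargs)

    # retrain on: the metadata-reuse shortcut is skipped regardless of the
    # replace flag, and the row is tied to the current run via run_id.
    retrained_id = trainer._write_model_to_db(retrain=True, **train_kwargs)
    return reused_id, merged_id, retrained_id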