def deserialize_ridge_regressor(model_dict): model = Ridge(model_dict["params"]) model.coef_ = np.array(model_dict["coef_"]) if "n_iter_" in model_dict: model.n_iter_ = np.array(model_dict["n_iter_"]) if isinstance(model_dict["intercept_"], list): model.intercept_ = np.array(model_dict["intercept_"]) else: model.intercept_ = float(model_dict["intercept_"]) return model
def explain_instance(self, neighborhood_data, neighborhood_weights,
                     neighborhood_labels, label, num_features,
                     feature_selection, simple_model=None):
    if simple_model is None:
        simple_model = Ridge(alpha=1,
                             fit_intercept=True,
                             random_state=self.random_state)

    assert type(simple_model) in BaseLIME.MODELS, \
        f'Invalid simple_model type, choose from: {BaseLIME.MODELS}'

    if simple_model.random_state != self.random_state:
        warnings.warn(
            'random_state of the simple_model is not equal to the LIME random_state!')

    labels_column = neighborhood_labels[:, label]
    used_features = self._feature_selection(neighborhood_data,
                                            labels_column,
                                            neighborhood_weights,
                                            num_features,
                                            feature_selection)

    # makes sure that weights are not passed in multiclass classification
    simple_model = clone(simple_model)
    simple_model.fit(neighborhood_data[:, used_features],
                     labels_column,
                     sample_weight=neighborhood_weights)
    prediction_score = simple_model.score(
        neighborhood_data[:, used_features],
        labels_column,
        sample_weight=neighborhood_weights)

    local_pred = simple_model.predict(
        neighborhood_data[0, used_features].reshape(1, -1))

    if type(simple_model) in [DecisionTreeRegressor, RandomForestRegressor]:
        feature_importance = sorted(zip(used_features,
                                        simple_model.feature_importances_),
                                    key=lambda x: np.abs(x[1]),
                                    reverse=True)
        simple_model.intercept_ = np.zeros(neighborhood_labels.shape[1])
    else:
        feature_importance = sorted(
            zip(used_features, simple_model.coef_),
            # key=lambda x: np.abs(x[1]), reverse=True)
            # Changed to take into account only positive coefficients
            key=lambda x: x[1],
            reverse=True)

    return (simple_model.intercept_, feature_importance, prediction_score,
            local_pred)
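The method above depends on BaseLIME internals (MODELS, _feature_selection) that are not shown here. The following is a self-contained sketch of just the surrogate step it performs: fit a proximity-weighted Ridge on perturbed neighborhood samples and read coef_, intercept_, the weighted R^2 score, and the local prediction as the explanation. The synthetic black-box labels and the exponential kernel are illustrative assumptions.

import numpy as np
from sklearn.linear_model import Ridge

rng = np.random.default_rng(0)
neighborhood_data = rng.normal(size=(500, 5))   # perturbed samples; row 0 is the explained instance
neighborhood_data[0] = 0.0
labels_column = 3 * neighborhood_data[:, 0] - 2 * neighborhood_data[:, 3]  # stand-in black-box output

# Proximity kernel: perturbations closer to the explained instance get larger weights.
distances = np.linalg.norm(neighborhood_data - neighborhood_data[0], axis=1)
neighborhood_weights = np.exp(-(distances ** 2) / 2.0)

simple_model = Ridge(alpha=1, fit_intercept=True, random_state=0)
simple_model.fit(neighborhood_data, labels_column, sample_weight=neighborhood_weights)

prediction_score = simple_model.score(neighborhood_data, labels_column,
                                      sample_weight=neighborhood_weights)
local_pred = simple_model.predict(neighborhood_data[0].reshape(1, -1))
feature_importance = sorted(enumerate(simple_model.coef_), key=lambda x: x[1], reverse=True)
print(simple_model.intercept_, feature_importance, prediction_score, local_pred)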
model.addLayer(DenseLayer(7, LinearActivation()))
model.initialize(QuadraticCost())

ridge = Ridge(alpha=0.1)

# load model parameters
path1 = './model/my_model_W_{}.dat'
path2 = './model/my_model_b_{}.dat'
path3 = './model/my_model_ridge_W.dat'
path4 = './model/my_model_ridge_intercept.dat'
model_W, model_b, model_ridge_W, model_ridge_intercept = load_model(
    4, path1, path2, path3, path4)

# update the model
for i in range(len(model.layers)):
    model.layers[i].W = model_W[i]
    model.layers[i].b = model_b[i]
ridge.coef_ = model_ridge_W
ridge.intercept_ = model_ridge_intercept[0]

# get the data point
inputlist = ball_tracking()

# predict the configuration
tar_position_in_list = list(ridge.predict(model.predict(inputlist)))
for i in range(len(joint_names)):
    tar_position[joint_names[i]] = tar_position_in_list[i]

while True:
    cur_pos = limb.joint_angles()
    for key, value in tar_position.iteritems():
        error_new[key] = tar_position[key] - cur_pos[key]
        derror[key] = error_new[key] - error_old[key]
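The sklearn-specific pattern in this script is reusing a saved Ridge head without refitting, by assigning coef_ and intercept_ directly before calling predict. Below is a minimal sketch of that pattern alone, with random arrays standing in for the loaded weights and for the network output; the robot-side helpers such as load_model, ball_tracking, and limb exist only in the original environment and are not reproduced.

import numpy as np
from sklearn.linear_model import Ridge

# Stand-ins for the arrays returned by load_model: a 7-output linear map over 3 features (assumed shapes).
model_ridge_W = np.random.rand(7, 3)
model_ridge_intercept = np.random.rand(1, 7)

ridge = Ridge(alpha=0.1)
ridge.coef_ = model_ridge_W                 # no fit() call; predict() only needs coef_ and intercept_
ridge.intercept_ = model_ridge_intercept[0]

features = np.random.rand(1, 3)             # stand-in for model.predict(inputlist)
tar_position_in_list = list(ridge.predict(features)[0])
print(tar_position_in_list)                 # seven predicted joint targets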