from sklearn.model_selection import cross_val_predict, cross_val_score
from mgetool.show import BasePlot

inter = lin.intercept_
# y_pre = lin.predict(XX)
# score = lin.score(XX, Y)

# Cross-validated predictions and the mean (negated) MAE over 5 folds.
y_pre = cross_val_predict(lin, XX, Y, cv=5)
score = cross_val_score(lin, XX, Y, cv=5, scoring='neg_mean_absolute_error').mean()
# label = "Space Group: %s\n" % i + "MAE(CV): %.2f" % abs(score)

import matplotlib.pyplot as plt

p = BasePlot(font=None)


def scatter(y_true, y_predict, strx='y_true', stry='y_predicted', label=""):
    x, y = y_true, y_predict
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.scatter(x, y, marker='o', s=70, alpha=0.8, c='orange')
    # The remaining styling of this helper is truncated in the source; a minimal
    # completion is assumed below.
    ax.set_xlabel(strx)
    ax.set_ylabel(stry)
    if label:
        ax.set_title(label)
    return fig, ax
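# Note (added, not part of the original script): 'neg_mean_absolute_error' is the
# negated MAE, which is why the commented label above takes abs(score). As a rough
# sanity check, the pooled MAE of the cross-validated predictions should be of the
# same magnitude as abs(score) (the two are averaged differently, so not identical).
from sklearn.metrics import mean_absolute_error
print("MAE(CV, pooled): %.2f" % mean_absolute_error(Y, y_pre))
print("MAE(CV, per-fold mean): %.2f" % abs(score))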
                    re_Tree=0, random_state=0, verbose=True, add_coef=True,
                    inter_add=True, cal_dim=False, inner_add=True, personal_map=False)

sl.fit()
print(sl.expr)

# Training-set predictions and R^2.
y_pre = sl.predict(X)
score_all = sl.score(X, y, "r2")

p = BasePlot(font=None)
p.scatter(y, y_pre, strx='Experimental $E_{gap}$', stry='Calculated $E_{gap}$')

import matplotlib.pyplot as plt

plt.show()

# Test-set predictions and R^2.
y_pre = sl.predict(X_test)
score_test = sl.score(X_test, y_test, "r2")

p = BasePlot(font=None)
p.scatter(y_test, y_pre, strx='Experimental $E_{gap}$',
          stry='Calculated $E_{gap}$')
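# Optional cross-check (an added sketch, not in the original example): the "r2"
# values reported by sl.score are expected to agree with sklearn's r2_score on the
# same predictions.
from sklearn.metrics import r2_score
print("train R2: %.3f (sl) vs %.3f (sklearn)" % (score_all, r2_score(y, sl.predict(X))))
print("test  R2: %.3f (sl) vs %.3f (sklearn)" % (score_test, r2_score(y_test, sl.predict(X_test))))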
                 stats=None, verbose=True, migrate_prob=0,
                 tq=True, store=True, personal_map=False,
                 stop_condition=None, details=False,
                 classification=False, score_object="y",
                 )

est_gp.fit(gd, datav2, categories=("Mul", "Div", "Add", "exp"), power_categories=(0.5, 2))
e = est_gp.loop.top_n(100, ascending=True)

# Evaluate one of the recovered expressions on the raw data.
import numpy as np

x0 = data2[:, 0]
x1 = data2[:, 1]
y = data2[:, 2]
pre_y = 16.18 * np.exp(-0.46792396 * x1 - 1.5177372 * x1 / (-0.1052 * x0 - 9.002 * x1)) - 2.694

from mgetool.show import BasePlot

bp = BasePlot()
plt = bp.scatter_45_line(y, pre_y)
plt.show()
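# Quick numeric check of the expression above (an added sketch; it mirrors the
# r2_score / mean_squared_error scoring used further below in these examples).
from sklearn.metrics import r2_score, mean_squared_error
print("R2: %.3f" % r2_score(y, pre_y))
print("MSE: %.3f" % mean_squared_error(y, pre_y))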
                    out_add=True, cal_dim=True, vector_add=True, personal_map=False)

sl.fit()
score = sl.score(x, y, "r2")
print(i, sl.expr)
y_pre = sl.predict(x)
# break

# Convert the prediction back from the scaled SI representation to eV.
y_pre = si_transformer.scale_y * y_pre
ssc = Dim.inverse_convert(y_dim, target_units=eV)[0]
y_pre = y_pre * ssc

p = BasePlot(font=None)
p.scatter(Y, y_pre, strx='Experimental $E_{gap}$', stry='Calculated $E_{gap}$')

import matplotlib.pyplot as plt

plt.show()

# Compare with a plain linear model built on hand-picked nonlinear features.
from sklearn.linear_model import LinearRegression

lin = LinearRegression()
XX = np.vstack((X[:, 1] ** 0.333,
                X[:, 24] / (X[:, 22] ** 0.333 + X[:, 23] ** 0.333),
                )).T  # closed and transposed here (samples x features); further feature terms are truncated in the source
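# Added continuation sketch (assumed; the fitting step itself is not shown in the
# source): fit the linear model on the stacked features so that its coefficients
# and intercept can be inspected.
lin.fit(XX, Y)
print(lin.coef_, lin.intercept_)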
                 tq=True, store=True, personal_map=False,
                 stop_condition=None, details=False,
                 classification=False, score_object="y",
                 )

# est_gp.fit(gd, datav2, categories=("Mul", "Div", "Add", "exp"),)
# e = est_gp.loop.top_n(100, ascending=True)

# Re-use a previously found expression instead of re-running the search.
import numpy as np

x0 = data2[:, 0]
x1 = data2[:, 1]
y = data2[:, 2]
pre_y = 6.589 * np.exp(-0.10110812 * x1 + 0.11520935 * x1 / (0.001157 * x0 + 0.3287 * x1)) + 2.031

from mgetool.show import BasePlot

bp = BasePlot()
plt = bp.scatter_45_line(y, pre_y, strx='Real Target', stry='Predict Target')
# plt.show()
plt.savefig("total.pdf")

# Collect real vs. predicted values in a table and score the expression.
import pandas as pd
from sklearn.metrics import r2_score, mean_squared_error

data2 = np.concatenate((data2, pre_y.reshape(-1, 1)), axis=1)
data2 = pd.DataFrame(data2, columns=["x0", "x1", "real", "predict"])
r2_s = r2_score(y, pre_y)
MSE2_s = mean_squared_error(y, pre_y)
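# Added follow-up sketch (the CSV filename is illustrative, not from the source):
# store the real-vs-predicted table next to the saved figure and report both metrics.
data2.to_csv("total_predictions.csv", index=False)
print("R2: %.3f, MSE: %.3f" % (r2_s, MSE2_s))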
import numpy as np
import matplotlib.pyplot as plt
from mgetool.show import BasePlot

# Exhaustively search feature subsets of the requested sizes and refit on the best one.
clf = Exhaustion(estimator, n_select=n_select, muti_grade=2, muti_index=[2, X.shape[1]],
                 must_index=None, n_jobs=1, refit=True).fit(X, y)

# Map the indices of the 10 best-scoring subsets back to feature names.
name_ = name_to_name(X_frame.columns.values, search=[i[0] for i in clf.score_ex[:10]],
                     search_which=0, return_which=(1,), two_layer=True)

sc = np.array(clf.scatter)

for i in clf.score_ex[:]:
    print(i[1])
for i in name_:
    print(i)

t = clf.predict(X)

p = BasePlot()
p.scatter(y, t, strx='True $E_{gap}$', stry='Calculated $E_{gap}$')
plt.show()
p.scatter(sc[:, 0], sc[:, 1], strx='Number', stry='Score')
plt.show()

# Persist the score scatter and the full subset ranking for later inspection.
store.to_csv(sc, method_name + "".join([str(i) for i in n_select]))
store.to_pkl_pd(clf.score_ex, method_name + "".join([str(i) for i in n_select]))
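# Optional check (added, not part of the original script): R^2 of the refit model
# on the training data, using sklearn's r2_score.
from sklearn.metrics import r2_score
print("R2 of refit subset model: %.3f" % r2_score(y, t))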