def main():
    Model = 'four_flux'
    R_km = 0
    R_muf = 0

    # load data
    RealCompose_2 = np.load('RealCompose_2.npy')
    RealIngredient_2 = np.load('RealIngredint_2.npy')
    Composes = np.append(RealCompose, RealCompose_2, axis=0)
    Ingredients = np.append(RealIngredient, RealIngredient_2, axis=0)
    data_c = np.load('data_c.npy')
    data_p = np.load('data_p.npy').T

    # get K/S of base
    base_f_km = mf.math_model(data_p[0] - R_km, 'km')
    base_f_muf = mf.math_model(data_p[0] - R_muf, 'four_flux')

    # get F (unit K/S per colorant)
    F_km = get_F(data_p[1:] - R_km, data_c, base_f_km, 'km')
    F_muf = get_F(data_p[1:] - R_muf, data_c, base_f_muf, 'four_flux')

    diffs_km = []
    diffs_muf = []
    for i in range(Composes.shape[0]):
        PredictIngredient_km = Mix(F_km, base_f_km, Composes[i], 'km') + R_km
        PredictIngredient_muf = Mix(F_muf, base_f_muf, Composes[i], 'four_flux') + R_muf
        dif = mf.color_diff(PredictIngredient_km, Ingredients[i])
        if dif < 6:
            diffs_km.append(dif)
        dif = mf.color_diff(PredictIngredient_muf, Ingredients[i])
        if dif < 6:
            diffs_muf.append(dif)

    plt.figure()
    mean_km = sum(diffs_km) / len(diffs_km)
    X = np.arange(len(diffs_km))
    plt.scatter(X, diffs_km, label='KM')
    plt.plot(X, np.repeat(mean_km, len(diffs_km)))
    plt.text(1, mean_km, '%.3f' % mean_km)

    mean_muf = sum(diffs_muf) / len(diffs_muf)
    X = np.arange(len(diffs_muf))
    plt.scatter(X, diffs_muf, label='four_flux')
    plt.plot(X, np.repeat(mean_muf, len(diffs_muf)))
    plt.text(1, mean_muf, '%.3f' % mean_muf)

    plt.xlabel('recipe index', fontproperties=font_set)      # 加料方案
    plt.ylabel('color difference', fontproperties=font_set)  # 色差
    plt.legend()
    print(matplotlib.get_backend())
    plt.show()
def Mix(compose, data_c, data_p):
    base_f = mf.math_model(data_p[0], 'four_flux')
    F = np.zeros_like(base_f)
    for i in range(compose.size):
        # unit K/S of colorant i, estimated from its second concentration sample
        df = (mf.math_model(data_p[i * 3 + 2], 'four_flux') - base_f) / data_c[i][1]
        F += df * compose[i]
    return mf.i_math_model(F + base_f, 'four_flux')
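# Mix() above works in K/S space: F_mix = F_base + sum_i compose_i * F_i.
# mf.math_model / mf.i_math_model are defined elsewhere in the repo; for the 'km'
# model they presumably implement the standard Kubelka-Munk transform and its
# inverse. A minimal sketch under that assumption (not the project's actual code):

def km_math_model(R):
    # Kubelka-Munk: K/S = (1 - R)^2 / (2 R), applied element-wise
    return (1.0 - R) ** 2 / (2.0 * R)


def km_i_math_model(ks):
    # inverse Kubelka-Munk: R = 1 + K/S - sqrt((K/S)^2 + 2 K/S)
    return 1.0 + ks - np.sqrt(ks ** 2 + 2.0 * ks)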
def get_dfs_KM(w, data_c, data_p):
    base_f = mf.math_model(data_p[0], 'four_flux')
    dfs = []
    for i in range(data_c.shape[0]):
        # unit K/S of colorant i, averaged over its three concentration samples,
        # with the correction term subtracted before dividing by the concentration
        df = (mf.math_model(data_p[i * 3 + 1], 'four_flux') - base_f - correct_func(data_c[i][0], w)) / data_c[i][0]
        df += (mf.math_model(data_p[i * 3 + 2], 'four_flux') - base_f - correct_func(data_c[i][1], w)) / data_c[i][1]
        df += (mf.math_model(data_p[i * 3 + 3], 'four_flux') - base_f - correct_func(data_c[i][2], w)) / data_c[i][2]
        dfs.append(df / 3)
    return np.array(dfs)
def plot_ks(id, ax):
    ax.set_title('id=%d' % id)
    # K/S of the three concentration samples of colorant `id`, minus the substrate K/S
    ks = mf.math_model(data_p[id * 3 + 1:id * 3 + 4], "four_flux") - mf.math_model(data_p[0], "four_flux")
    c = data_c[id][:, np.newaxis]
    ks = ks / c
    c = np.repeat(c, 31, axis=1).reshape(93)
    # ks = ks.reshape(93)
    ax.scatter3D(c, Y, ks)
    ax.set_xlabel('C')
    ax.set_ylabel('wavelength')
    ax.set_zlabel('k/s per unit')
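# Hypothetical usage sketch (assumes data_c, data_p and the wavelength grid Y are
# already defined at module level, as plot_ks expects):
#
#     from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection on older matplotlib
#     fig = plt.figure()
#     ax = fig.add_subplot(111, projection='3d')
#     plot_ks(5, ax)   # unit K/S of colorant 5 vs. concentration and wavelength
#     plt.show()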
def get_total_loss(K1, K2):
    # undo the Saunderson surface correction, then move to K/S space
    R = (data_p - K1) / (1 - K1 - K2 + K2 * data_p)
    F = mf.math_model(R, model)
    ans = 0
    for i in range(31):
        for j in range(21):
            ans += get_loss(F[i][3 * j + 1:3 * j + 4] - F[i][0], data_c[j])
    return ans
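# The expression for R above is the inverse Saunderson correction: for a measured
# reflectance R_m, the internal reflectance is R = (R_m - K1) / (1 - K1 - K2 + K2 * R_m).
# The forward direction (what a function like i_Sanderson elsewhere in this repo
# presumably applies) is R_m = K1 + (1 - K1) * (1 - K2) * R / (1 - K2 * R).
# A minimal sketch, assuming that convention:

def saunderson_inverse(R_m, k1, k2):
    # measured reflectance -> internal reflectance (same formula as in get_total_loss)
    return (R_m - k1) / (1 - k1 - k2 + k2 * R_m)


def saunderson_forward(R, k1, k2):
    # internal reflectance -> measured reflectance
    return k1 + (1 - k1) * (1 - k2) * R / (1 - k2 * R)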
def get_F(p, c, base, Model):
    # unit K/S per colorant, estimated from the middle concentration sample only
    ff = mf.math_model(p, Model) - base
    F = []
    sample_size = c.shape[1]
    for i in range(c.shape[0]):
        F.append(ff[sample_size * i + int(sample_size / 2)] / c[i][int(sample_size / 2)])
    return np.array(F)
def get_F(p, c, base, Model):
    # unit K/S per colorant, averaged over all of its concentration samples
    ff = mf.math_model(p, Model) - base
    F = []
    sample_size = c.shape[1]
    for i in range(c.shape[0]):
        k = 0.0
        for j in range(sample_size):
            k += ff[sample_size * i + j] / c[i][j]
        F.append(k / sample_size)
    return np.array(F)
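# Note: two estimators of the per-unit K/S matrix F appear in this repo. The variant
# above averages (f(R_ij) - f_base) / c_ij over every concentration sample of a colorant,
# while the other variant uses only the middle sample. With the real data the shapes are
# p: (63, 31), c: (21, 3), and the returned F: (21, 31).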
def get_diffs(model):
    base = mf.math_model(R[0], model)
    F = get_F(R[1:], data_c, base, model)
    diffs = []
    for i in range(Composes.shape[0]):
        PredictIngredient = Mix(F, base, Composes[i], model)
        dif = mf.color_diff(i_Sanderson(PredictIngredient, k1, k2), Ingredients[i])
        if dif < 50:
            # below the threshold, keep the weighted per-wavelength reflectance error
            # instead of the scalar color difference
            dif = (i_Sanderson(PredictIngredient, k1, k2) - Ingredients[i]) * diff_weight
            diffs.append(dif)
        else:
            print(i)
    return np.array(diffs)
def main():
    # data about km
    data_c = np.load('data_c.npy')
    data_p = np.load('data_p.npy').T

    # get K/S of base
    base_f_km = mf.math_model(data_p[0], 'km')
    F_km = get_F(data_p[1:], data_c, base_f_km, 'km')

    # bin the color differences of 65536 random recipes against each of 10 random targets
    floor = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1000]
    for x in range(10):
        test_com = random_com(1)[0]
        test_ref = Mix(F_km, base_f_km, test_com, 'km')
        count = np.zeros(10).astype(int)
        coms = random_com(65536)
        for c in coms:
            r = Mix(F_km, base_f_km, c, 'km')
            dif = mf.color_diff(r, test_ref)
            for i in range(10):
                if floor[i] < dif < floor[i + 1]:
                    count[i] += 1
        print(count)
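# The inner binning loop is essentially a histogram and could also be written with
# NumPy (a sketch; `floor` are the same bin edges, `coms` one batch of random recipes;
# edge handling differs slightly at exact bin boundaries):
#
#     diffs = np.array([mf.color_diff(Mix(F_km, base_f_km, c, 'km'), test_ref) for c in coms])
#     count, _ = np.histogram(diffs, bins=floor)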
def PredictCompose(F, base_f, targetIngredient, Model):
    # least-squares recipe: find compose such that compose @ F ~= target_f
    target_f = mf.math_model(targetIngredient, Model) - base_f
    return target_f.dot(F.T).dot(np.linalg.inv(F.dot(F.T)))
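# The normal-equation solve above is an ordinary least-squares fit and can be written
# more stably with np.linalg.lstsq, which avoids explicitly inverting F @ F.T
# (a sketch with the same shapes: F is (n_colorants, 31), target_f is (31,)):
#
#     compose, *_ = np.linalg.lstsq(F.T, target_f, rcond=None)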
def get_F(p, c, base, Model):
    ff = mf.math_model(p, Model) - base
    F = []
    sample_size = c.shape[1]
    for i in range(c.shape[0]):
        F.append(ff[sample_size * i + int(sample_size / 2)] / c[i][int(sample_size / 2)])
    return np.array(F)


data_c = np.load('data_c.npy')
data_p = np.load('data_p.npy').T

# get K/S of base
base_f_km = mf.math_model(data_p[0], 'km')

# get F
F_km = get_F(data_p[1:], data_c, base_f_km, 'km')

com0 = np.zeros(21).astype(float)
Noise = [0.00001, 0.0001, 0.0005, 0.001, 0.005, 0.01, 0.02, 0.03]
size = 50000

# mean color difference caused by adding Gaussian noise of each scale to a random reflectance
for noise in Noise:
    diff = 0
    for i in range(size):
        R0 = np.random.rand(31)
        diff += mf.color_diff(R0, R0 + noise * np.random.normal(0, 1, 31))
    print('mean_diff_with_noise_', noise, ':', diff / size)
def main():
    R0 = 0.048
    fil = 3
    scale = [0.515, 0.515, 0.52, 0.56]
    w_names = [
        'data_w_size6.npy',
        'data_w_size12.npy',
        'data_w_size18.npy',
        'data_w_size21_v3_no_r0.npy',
    ]

    # data_c: concentration data, 21 colorants * 3 sample points each
    # data_p: spectral reflectance data, size (1 + 21 * 3) * 31; the first row is the substrate
    data_c = np.load('data_c.npy')
    data_p = np.load('data_p.npy')
    c, p = mf.data_filiter(filiters[fil], data_c, data_p)
    p = p.T - R0

    # K/S of the substrate
    base_f = mf.math_model(p[0], 'four_flux')

    # load w and compute the unit K/S values
    w = np.load(w_names[fil]).T
    dfs = get_dfs_KM(w, c, p)

    RealCompose_2 = np.load('RealCompose_2.npy')
    RealIngredient_2 = np.load('RealIngredint_2.npy')
    Composes = np.append(RealCompose, RealCompose_2, axis=0)
    Ingredients = np.append(RealIngredient, RealIngredient_2, axis=0)
    print('\n using : ', w_names[fil])

    Y1 = []
    Y2 = []
    Y3 = []
    # X_ = np.linspace(0, 0.1, 100)
    max_diff = 6
    for com in range(Composes.shape[0]):
        compose = mf.data_filiter_(filiters[fil], Composes[com])
        # only keep recipes that use three colorants (18 of the 21 concentrations are zero)
        if Counter(compose)[0] == 18:
            y = mf.color_diff(Ingredients[com], Mix(compose, c, p + R0))
            if y < max_diff:
                Y1.append(y)
            y = mf.color_diff(
                Ingredients[com],
                corrected_Mix(np.array([compose]), w, dfs, base_f, scale[fil]) + R0)
            if y < max_diff:
                Y2.append(y)
            y = mf.color_diff(Ingredients[com], Mix(compose, c, p) + R0)
            if y < max_diff:
                Y3.append(y)

    plt.figure()
    mean1 = sum(Y1) / len(Y1)
    X = np.arange(len(Y1))
    plt.scatter(X, Y1, label='four_flux')
    plt.plot(X, np.repeat(mean1, len(Y1)))
    plt.text(1, mean1, '%.3f' % mean1)

    mean3 = sum(Y3) / len(Y3)
    X = np.arange(len(Y3))
    plt.scatter(X, Y3, marker='*', label='corrected_four_flux_1.0')
    plt.plot(X, np.repeat(mean3, len(Y3)))
    plt.text(1, mean3, '%.3f' % mean3)

    mean2 = sum(Y2) / len(Y2)
    X = np.arange(len(Y2))
    plt.scatter(X, Y2, marker='x', label='corrected_four_flux_2.0')
    plt.plot(X, np.repeat(mean2, len(Y2)))
    plt.text(1, mean2, '%.3f' % mean2)

    plt.xlabel('recipe index', fontproperties=font_set)      # 加料方案
    plt.ylabel('color difference', fontproperties=font_set)  # 色差
    # plt.imshow(Y)
    # hide ticks
    plt.xticks([])
    plt.yticks([])
    plt.legend()
    plt.show()
def main():
    # data about km
    data_c = np.load('data_c.npy')
    data_p = np.load('data_p.npy').T

    # get K/S of base
    base_f_km = mf.math_model(data_p[0], 'km')
    F_km = get_F(data_p[1:], data_c, base_f_km, 'km')

    # shu = np.zeros(base_color_num)
    # shu[0] = 1
    # shu[1] = 1
    # shu[2] = 1
    # np.random.shuffle(shu)
    # random_com = np.random.rand(base_color_num) * shu
    # test_ref = Mix(F_km, base_f_km, random_com, 'km')

    data = np.load('data/data_01.npz')
    concentrations = torch.from_numpy(data['concentrations']).float()
    reflectance = torch.from_numpy(data['reflectance']).float()
    random_com = concentrations[42].cpu().data.numpy()
    test_ref = reflectance[42].cpu().data.numpy()

    # data about inn
    inn = torch.load('model_dir/model_02')
    y_noise_scale = 1e-4
    N_sample = 20000
    N_class = 100
    dim_x = base_color_num
    dim_y = reflectance_dim
    dim_z = 13
    dim_total = max(dim_x, dim_y + dim_z)

    sample, densi = generate_test_sample(test_ref, N_sample, y_noise_scale,
                                         dim_x, dim_y, dim_z, dim_total)
    p_c = predict(0, 0, sample, inn)

    # histogram of color differences, and the sample closest to the target reflectance
    min_diff = 100
    index = -1
    floor = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1000]
    count = np.zeros(10).astype(int)
    for i in range(N_sample):
        df = mf.color_diff(Mix(F_km, base_f_km, p_c[i], 'km'), test_ref)
        for b in range(10):
            if floor[b] < df < floor[b + 1]:
                count[b] += 1
        if min_diff > df:
            min_diff = df
            index = i
    print(min_diff)
    print(count)

    N_sample = 10000
    p_c = p_c[:N_sample]
    # p_c = p_c * densi / densi.max()
    kmeans_model = KMeans(N_class).fit(p_c)
    labels = kmeans_model.labels_

    # using the cluster centers
    for c in kmeans_model.cluster_centers_:
        print('center_color_diff:', mf.color_diff(Mix(F_km, base_f_km, c, 'km'), test_ref))

    # 2-D t-SNE embedding of the predicted recipes, the real recipe and the cluster centers
    p_c = np.concatenate((p_c, [random_com], kmeans_model.cluster_centers_))
    tsne = TSNE(n_components=2)
    res = np.array(tsne.fit_transform(p_c))

    d = []
    for j in range(N_class):
        l = []
        for i in range(N_sample):
            if j == labels[i]:
                l.append(res[i])
        d.append(np.array(l).T)

    fig = plt.figure()
    # ax = Axes3D(fig)
    for j in range(N_class):
        plt.scatter(d[j][0], d[j][1], s=1)
    plt.scatter(res[index][0], res[index][1], marker='o', c='r', label='min')
    plt.scatter(res[N_sample][0], res[N_sample][1], marker='v', c='b', label='real')
    # plot the cluster centers
    for i in range(N_class):
        plt.scatter(res[N_sample + 1 + i][0], res[N_sample + 1 + i][1], marker='s', c='g')
    plt.legend()
    plt.show()