# Loss used by ng.Model; several alternatives were tried and left commented out.
def loss(y_true, x_out):
#    return tf.reduce_mean(tf.pow(y_true - x_out, 2)) + tf.abs(var-0.5)*1e-4/(var+1e-5)
#    return tf.losses.softmax_cross_entropy(y_true, x_out)
    return tf.losses.sigmoid_cross_entropy(y_true, x_out)  # loss1
#    mean, var = tf.nn.moments(tf.reshape(x_out, [-1]), axes=[0])
#    return tf.losses.sigmoid_cross_entropy(y_true, x_out) + 1e-4/(var+1e-5)
#    return tf.reduce_mean(tf.keras.losses.categorical_crossentropy(y_true, x_out))
#    return tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=x_out, labels=y_true))

conv = ng.Model(data_provider=dp, optimizer=tf.train.AdamOptimizer,
                loss=loss, restore=restore,
                model_add='./model/1', arch=arch)

#for i in range(1):
#    print('set ' + str(i+1) + '/1')
#    conv.train(data_provider=dp, training_epochs=2,
#               iterations=100, n_s=100,
#               learning_rate=0.001, verbose=1)
#    for _ in range(5):
#        x, y = dp(1)
#        pred = conv.predict(x)
#        print(np.mean(x), np.argmax(y), np.argmax(pred))
# #conv.train(data_provider=dp, training_epochs=2,
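# A minimal standalone check (assuming TensorFlow 1.x, as the tf.train/tf.losses
# calls above suggest): the active loss applies the sigmoid internally, so x_out
# is expected to be raw logits, and with default weights it matches the mean of
# tf.nn.sigmoid_cross_entropy_with_logits, one of the commented-out alternatives.
# The tensor names below are illustrative only.
#
# import numpy as np
# import tensorflow as tf
#
# y_demo = tf.constant(np.random.randint(0, 2, (4, 8)).astype(np.float32))
# logits_demo = tf.constant(np.random.randn(4, 8).astype(np.float32))
# l_a = tf.losses.sigmoid_cross_entropy(y_demo, logits_demo)
# l_b = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=y_demo,
#                                                              logits=logits_demo))
# with tf.Session() as sess:
#     print(sess.run([l_a, l_b]))  # the two values should agree to float precision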
# Filter applied to each map by the data provider.
def filtf(x):
    return ccg.filters(x, edd_method='sch')

dp = DataProvider(x_files, y_files, alpha, nx=nx, ny=ny,
                  n_buffer=2, reload_rate=10000, filt=filtf)

model_add = './models/' + str(n_layers) + '_layers_f' + filt + '/'
res_dir = './results/' + str(n_layers) + '_layers_f' + filt + '/'
ccg.ch_mkdir(res_dir + 'plots')

model = ng.Model(dp, restore=0, model_add=model_add + str(0), arch=arch)
print('# of variables:', model.n_variables)

if os.path.exists(res_dir + 'info.npy'):
    # Resume: recover the loop index, current alpha, step size and learning rate.
    i, dp.alpha, dalpha, learning_rate = np.load(res_dir + 'info.npy')
    i = int(i)
    model.model_add = model_add + str(i)
    print('Loading model ' + str(i) + ' ...')
    model.restore()
else:
    i = 0

for _ in range(ntry):
    alphas.append(dp.alpha)
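# The resume branch above expects res_dir + 'info.npy' to hold exactly four
# numbers. The counterpart save is not part of this excerpt; a sketch of what
# it presumably looks like (names taken from the load side):
#
# np.save(res_dir + 'info.npy', np.array([i, dp.alpha, dalpha, learning_rate]))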
#learning_rate = 5e-6

if qq0 == 0:
    model_add_now = model0_add
else:
    model_add_now = model_add

exec('from arch_' + arch + ' import architecture')

def arch(x_in):
    return architecture(x_in, trainable=0)

restore = 1
conv = ng.Model(data_provider=dpt, restore=restore,
                model_add=model_add_now, arch=arch)

print(conv.model_add)
conv.model_add = model_add
print(conv.model_add)

print('')
print('Number of trainable variables: {:d}'.format(conv.n_variables))

#nqq = qq0+1
for qq in range(qq0, nqq):
    print(qq)
    the_print('ROUND: ' + str(qq) + ', learning rate=' + str(learning_rate), bgc='blue')
    conv.train(data_provider=dpt,
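# Note that arch is first the architecture name string consumed by exec(...) and
# is then rebound to the wrapper function handed to ng.Model. A more explicit way
# to perform the same dynamic import is importlib (equivalent behaviour assumed;
# arch_name below is a hypothetical stand-in for the original name string):
#
# import importlib
# architecture = importlib.import_module('arch_' + arch_name).architecture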
try:
    qq0, learning_rate = np.load(res_file + '_prop.npy')
except:
    # Fall back to a file that holds only qq0 and rederive the learning rate
    # from the decay factor.
    qq0 = np.load(res_file + '_prop.npy')
    learning_rate = learning_rate / (decay)**qq0

qq0 = int(qq0)
qq0 = qq0 + 1
print('The training will begin from round {}.'.format(qq0))
#exit()

ns = 10
#learning_rate = 5e-6

restore = qq0 != 0
conv = ng.Model(data_provider=dpt, restore=restore,
                model_add=model_add, arch='arch_' + args.arch)

print('')
print('Number of trainable variables: {:d}'.format(conv.n_variables))

nqq = qq0 + 1
for qq in range(qq0, nqq):
    print(qq)
    the_print('ROUND: ' + str(qq) + ', learning rate=' + str(learning_rate), bgc='blue')
    conv.train(data_provider=dpt, training_epochs=5,
               iterations=20, n_s=ns,
               learning_rate=learning_rate,
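# The try/except above reads res_file + '_prop.npy' back on restart: either the
# round index and learning rate together, or only the index, in which case the
# learning rate is rederived from the decay. The matching save is outside this
# excerpt; presumably something like the following at the end of each round:
#
# np.save(res_file + '_prop.npy', np.array([qq, learning_rate]))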
#fig, (ax1, ax2) = plt.subplots(ncols=2, nrows=1, figsize=(20, 10))
#ax1.imshow(x[0,:,:,0])
#ax1.axis('off')
#ax2.imshow(y[0,:,:,0])
#ax2.axis('off')

def arch(x_in):
    x_out = architecture(x_in=x_in, n_layers=5, res=2)
    return x_out

model = ng.Model(nx=nside, ny=nside, n_channel=1, n_class=1,
                 restore=0, model_add='./model/' + str(0) + '_' + str(dp.alpha),
                 arch=arch)
print('# of variables:', model.n_variables)

alphas = []
dalpha = 0.05
p_move = 0
for i in range(50):
    model.train(data_provider=dp, training_epochs=10,
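# The arch wrapper above only freezes n_layers and res before handing the
# architecture to ng.Model. An equivalent binding with functools.partial
# (a stylistic alternative, assuming architecture accepts x_in positionally):
#
# from functools import partial
# arch = partial(architecture, n_layers=5, res=2)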