def train(ftrain, ftest, epochs, savemodel, saveloss, savetest):
    # train a network from scratch, save it, plot its loss and run it on the test set
    X, y = load2d(ftrain, test=False)
    net3 = create_network(epochs)
    net3.fit(X, y)
    # raise the recursion limit so the nolearn model can be pickled
    sys.setrecursionlimit(1500000)
    with open(savemodel, 'wb') as f:
        pickle.dump(net3, f, -1)
    draw_loss_2(net3, saveloss)
    test(net3, ftest, savetest)
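# create_network() is defined elsewhere in this project. For reference, a minimal
# sketch of a compatible nolearn/Lasagne regression network is given below; the
# layer sizes, the 1x96x96 grayscale input shape and the number of output units
# (2 coordinates per landmark) are assumptions, not the project's actual values.
def create_network_sketch(epochs):
    from lasagne import layers
    from lasagne.updates import nesterov_momentum
    from nolearn.lasagne import NeuralNet
    return NeuralNet(
        layers=[
            ('input', layers.InputLayer),
            ('conv1', layers.Conv2DLayer),
            ('pool1', layers.MaxPool2DLayer),
            ('conv2', layers.Conv2DLayer),
            ('pool2', layers.MaxPool2DLayer),
            ('conv3', layers.Conv2DLayer),
            ('pool3', layers.MaxPool2DLayer),
            ('hidden4', layers.DenseLayer),
            ('hidden5', layers.DenseLayer),
            ('output', layers.DenseLayer),
        ],
        input_shape=(None, 1, 96, 96),          # assumed input size
        conv1_num_filters=32, conv1_filter_size=(3, 3), pool1_pool_size=(2, 2),
        conv2_num_filters=64, conv2_filter_size=(2, 2), pool2_pool_size=(2, 2),
        conv3_num_filters=128, conv3_filter_size=(2, 2), pool3_pool_size=(2, 2),
        hidden4_num_units=500, hidden5_num_units=500,
        output_num_units=20, output_nonlinearity=None,  # assumed 10 landmarks x 2
        update=nesterov_momentum,
        update_learning_rate=0.01,
        update_momentum=0.9,
        regression=True,
        max_epochs=epochs,
        verbose=1,
    )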
def fine_tune(fmodel, ftrain, epochs, ftest, savemodel, saveloss, savetest):
    # fine-tune a pre-trained model (fmodel) on a new training set
    X1, y1 = load2d(ftrain, test=False)
    # layers to keep frozen during fine-tuning, e.g. ['conv1', 'conv2', 'conv3']
    listFrozens = []
    newlayers = set_weights(fmodel, frozen=False, listLayers=listFrozens)
    net2 = build_model1(newlayers, epochs)
    net2.fit(X1, y1)
    # raise the recursion limit so the nolearn model can be pickled
    sys.setrecursionlimit(1500000)
    with open(savemodel, 'wb') as f:
        pickle.dump(net2, f, -1)
    draw_loss_2(net2, saveloss)
    test(net2, ftest, savetest)
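# The 'frozen'/'listLayers' arguments above suggest that selected layers can be
# excluded from training. In Lasagne this is commonly done by removing the
# 'trainable' tag from a layer's parameters; a hypothetical helper (not part of
# this project) illustrating the idiom:
def freeze_layers_sketch(net, layer_names):
    # net.layers_ is the OrderedDict of instantiated layers of a nolearn NeuralNet
    for name in layer_names:
        layer = net.layers_[name]
        for param in layer.params:
            layer.params[param].discard('trainable')  # the optimizer will skip it
    return net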
def test(net, ftest, fsave):
    X, _ = load2d(ftest, test=True)
    y_pred = net.predict(X)
    fig = pyplot.figure(figsize=(18, 16))
    fig.subplots_adjust(left=0, right=1, bottom=0, top=1, hspace=0.05, wspace=0.05)
    for i in range(16):
        ax = fig.add_subplot(4, 4, i + 1, xticks=[], yticks=[])
        plot_sample(X[i], y_pred[i], ax)
    fig.savefig(fsave, dpi=90)
    pyplot.close(fig)
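# plot_sample() and load2d() are project helpers defined elsewhere. A hypothetical
# sketch of plot_sample(), assuming 96x96 grayscale inputs and landmark coordinates
# normalised to [-1, 1]; the actual image size and scaling in this project may differ:
def plot_sample_sketch(x, y, axis):
    img = x.reshape(96, 96)
    axis.imshow(img, cmap='gray')
    # undo the assumed [-1, 1] normalisation to recover pixel coordinates
    axis.scatter(y[0::2] * 48 + 48, y[1::2] * 48 + 48, marker='x', s=10, c='r')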
FSAVEFOLDER = '/data3/linhlv/OUTPUT2/2018/tete/fine_tuning/run_test_10landmarks/'
filename = FSAVEFOLDER + 'landmarks/cnnmodel_10_output_fine_tuning_unfreeze_'  # '.txt' is appended per dataset below
FSAVEIMAGES = FSAVEFOLDER + 'images/'
DATA = ['v10', 'v11', 'v12', 'v14', 'v15', 'v16', 'v17', 'v18', 'v19']

for version in DATA:
    fmodelf = FMODEL + version + '.pickle'
    ftestf = FTEST + version + '.csv'
    flandmarks = filename + version + '.txt'
    net = None
    # raise the recursion limit so the pickled nolearn model can be loaded
    sys.setrecursionlimit(100000)
    with open(fmodelf, 'rb') as f:
        net = pickle.load(f)
    X, _ = load2d(ftestf, test=True)
    y_pred = net.predict(X)
    # display the estimated landmarks on the test images
    paths = loadCSV(ftestf)
    fileNames = extract_fileNames(paths)
    for i in range(len(y_pred)):
        predi = y_pred[i]
        write_file(flandmarks, predi)
        saveImg = FSAVEIMAGES + fileNames[i]
        fig = pyplot.figure()
        ax = fig.add_subplot(1, 1, 1, xticks=[], yticks=[])
        plot_sample(X[i], predi, ax)
        # save the annotated image (assumed step, mirroring test() above)
        fig.savefig(saveImg)
        pyplot.close(fig)
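# write_file() is defined elsewhere in this project; it presumably appends the
# predicted coordinates of one image to the landmarks text file. A hypothetical
# sketch (the real output format may differ):
def write_file_sketch(fname, pred):
    with open(fname, 'a') as f:
        f.write(' '.join(str(v) for v in pred) + '\n')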
    print(model)
    all_param = lasagne.layers.get_all_param_values(model.layers)
    net = build_model()
    lasagne.layers.set_all_param_values(net['output'], all_param, trainable=True)
    newlayers = lasagne.layers.DenseLayer(net['hidden5'], num_units=16, nonlinearity=None)
    #model.layers = newlayers
    print(model)
    return model

'''
if __name__ == '__main__':
    # Load data
    FTRAINF = '/data3/linhlv/pronotum/v1/csv/train_v19.csv'
    FTESTF = '/data3/linhlv/pronotum/v1/csv/test_v19.csv'
    X1, y1 = load2d(FTRAINF, test=False)

    # =================================================================
    # Load the parameters into a list of layers, create a new network and train it
    newlayers = set_weights(FMODEL)
    net2 = build_fine_tuning_model(newlayers)
    net2.fit(X1, y1)

    # Save the fine-tuned model
    sys.setrecursionlimit(150000)
    with open('/data3/linhlv/2018/saveModels/cnnmodel_all_10000_pronotum_fine_tune_v19.pickle', 'wb') as f:
        pickle.dump(net2, f, -1)

    # Draw the loss
    draw_loss(net2)
'''