# Train the siamese ranking model on one generator batch.
#
# Pulls a full (X, y) batch from `training_generator`, one-hot encodes the
# labels, and fits `model` on the left/right image pair inputs, validating
# against the preloaded validation arrays and checkpointing via `checkpointer`.
# NOTE(review): assumes X stacks to shape (2, n, ...) so X[0]/X[1] are the
# left/right input arrays — confirm against the generator's output format.
X, y = next(training_generator)  # idiomatic: next() instead of __next__()
print('X,y created')

# Convert the batch to a numpy array so the pair inputs can be indexed.
# (Pixel scaling, e.g. X = X/255, is intentionally NOT applied here.)
X = np.array(X)
print('X as arrray created')

# Labels arrive as class indices; Keras needs one-hot vectors for this loss.
print('one hot encoding ...')
y = to_categorical(y)

# Fit on the (left, right) pair. Validation data comes from preloaded numpy
# arrays; the checkpointer callback saves the best weights during training.
history = model.fit(
    [X[0], X[1]],
    y,
    batch_size=16,
    epochs=30,
    validation_data=([validationLeft, validationRight], validationLabels),
    callbacks=[checkpointer],
)

# Accumulate and plot the training curves so far (False => non-blocking/no-save
# flag as defined by the project's show() helper — TODO confirm semantics).
histories.append(history)
show(histories, False)
# Retrain the score network: load the ranking data, wrap the previously saved
# base network in a meta (siamese) network, fit it, and save the result.

# --- Paths ------------------------------------------------------------------
# Use one path segment per os.path.join argument instead of embedding "/" in a
# segment, so separators stay consistent with the Windows base directory.
baseDir = r"D:\Arnaud\data_croutinet\ottawa\data"
trainDir = os.path.join(baseDir, "train", "train.csv")
validationDir = os.path.join(baseDir, "validation", "validation.csv")
base_network_save = os.path.join(baseDir, "scoreNetworkRetrain2.h5")
ranking_network_save = os.path.join(baseDir, "rankingNetworkRetrain.h5")
base_network_save2 = os.path.join(baseDir, "scoreNetworkRetrain3.h5")

# --- Data -------------------------------------------------------------------
# Load training and validation pairs; labels are scalars in [0, 1]
# (per the project's loadAsScalars helper).
trainLeft, trainRight, trainLabels = loadAsScalars(trainDir)
validationLeft, validationRight, validationLabels = loadAsScalars(
    validationDir)

# --- Model ------------------------------------------------------------------
# Restore the previously trained score network and wrap it in the meta
# (two-input ranking) architecture.
base_network = load_model(base_network_save)
model = create_meta_network(INPUT_DIM, base_network)

# --- Training ---------------------------------------------------------------
history = model.fit(
    [trainLeft, trainRight],
    trainLabels,
    batch_size=16,
    epochs=30,
    validation_data=([validationLeft, validationRight], validationLabels),
)

# --- Results ----------------------------------------------------------------
# Plot the training curves and persist the retrained base (score) network.
show([history], False)
base_network.save(base_network_save2)