def taste():
    logDir = "../log"
    resetLog = False
    if resetLog:
        if tf.gfile.Exists(logDir):
            tf.gfile.DeleteRecursively(logDir)

    model = Model_C(28, 28, 1, nbCategories=2, favoritism=(1, 5))
    model.verbose = True

    now = time.time()
    summary_writer = tf.summary.FileWriter(logDir + "/" + str(now), model.sess.graph)

    lossTrain = []
    lossValid = []
    accuracyTrain = []
    accuracyValide = []

    try:
        nbStep = 2000
        for itr in range(nbStep):
            print("itr:", itr)
            X, Y = batchOneVignette(50)
            model.fit(X, Y)
            lossTrain.append(model.loss)
            accuracyTrain.append(model.accuracy)
            summary_writer.add_summary(model.summary, global_step=itr)

            if itr % 20 == 0:
                print("\nVALIDATION")
                X, Y = batchOneVignette(50)
                model.validate(X, Y)
                lossValid.append(model.loss)
                accuracyValide.append(model.accuracy)
                print("\n")
            else:
                # placeholders keep the validation curves aligned with the training iterations
                lossValid.append(np.nan)
                accuracyValide.append(np.nan)
    except KeyboardInterrupt:
        print("stopped by user")

    X, Y = batchOneVignette(16)
    hat_Y, hat_Y_proba = model.predict(X)
    draw16imgs(Y, "Y")
    draw16imgs(hat_Y, "hat Y")
    plt.show()

    model.close()
def tasteClassif():
    logDir = "../log"
    reset = True
    if reset and tf.gfile.Exists(logDir):
        tf.gfile.DeleteRecursively(logDir)

    img_size = 28
    multi_label = False

    if multi_label:
        dataDealer = lambda batchSize: batchSeveralVignette(batchSize)
        nbCat = 3
    else:
        dataDealer = lambda batchSize: batchOneVignette(batchSize)
        nbCat = 3

    #Xs, Ys_cat,Ys_background=dataDealer(1)
    #print("Ys_cat.shape:",Ys_cat.shape)

    model = Model_A(multi_label, img_size, img_size, 1, nbCat)
    model.verbose = True
    batchSize = 50
    model.learning_rate = 1e-3

    now = time.time()
    summary_writer = tf.summary.FileWriter(logDir + "/" + str(now), model.sess.graph)

    nbStep = 2000
    lossTrain = []
    lossValid = []
    accuracyTrain = []
    accuracyValide = []

    try:
        for itr in range(nbStep):
            print("itr:", itr)
            X, Y = dataDealer(batchSize)
            model.fit(X, Y)
            lossTrain.append(model.loss)
            accuracyTrain.append(model.accuracy)
            summary_writer.add_summary(model.summary, global_step=itr)

            if itr % 20 == 0:
                print("\nVALIDATION")
                X, Y = dataDealer(batchSize)
                model.validate(X, Y)
                lossValid.append(model.loss)
                accuracyValide.append(model.accuracy)
                print("\n")
            else:
                # placeholders keep the validation curves aligned with the training iterations
                lossValid.append(np.nan)
                accuracyValide.append(np.nan)
    except KeyboardInterrupt:
        print("stopped by user")

    plt.subplot(1, 2, 1)
    plt.plot(accuracyTrain, label="train")
    plt.plot(accuracyValide, '.', label="valid")
    plt.title("accuracy")
    plt.legend()

    plt.subplot(1, 2, 2)
    plt.plot(lossTrain, label="train")
    plt.plot(lossValid, '.', label="valid")
    plt.title("loss")
    plt.legend()

    """ TEST"""
    X, Y = dataDealer(16)
    hat_Y = model.predict(X)
    if multi_label:
        subTitles = (hat_Y > 0.5).astype(np.int32)
    else:
        subTitles = list(zip(np.argmax(Y, axis=1), np.argmax(hat_Y, axis=1)))
    print(subTitles)

    draw16imgs(X, "X", 0, 1, cmap="gray", subTitles=subTitles)
    plt.show()

    model.close()
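# A minimal, standalone sketch (numpy + matplotlib only, independent of the models above)
# of the np.nan bookkeeping used in taste() and tasteClassif(): matplotlib skips NaN
# values, so padding the validation lists with np.nan keeps their indices aligned with
# the training iterations while only the real validation points get drawn.
# The function name and the fake loss values are illustrative placeholders.
def _sketch_nan_aligned_curves():
    import numpy as np
    import matplotlib.pyplot as plt

    lossTrain, lossValid = [], []
    for itr in range(100):
        lossTrain.append(1.0 / (itr + 1))        # fake training loss
        if itr % 20 == 0:
            lossValid.append(1.2 / (itr + 1))    # fake validation loss, every 20 steps
        else:
            lossValid.append(np.nan)             # placeholder: skipped by plt.plot

    plt.plot(lossTrain, label="train")
    plt.plot(lossValid, '.', label="valid")      # dots appear only where validation ran
    plt.legend()
    plt.show()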
    plt.title(str(label))
    plt.show()


if __name__ == "__main__":
    #X,Y=batchOneVignette(4)
    X, Y = batchSeveralVignette(16)
    print("X.shape", X.shape)
    print("Y.shape", Y.shape)
    print(Y[0, :])
    print(Y[1, :])
    print(Y[2, :])
    print(Y[3, :])
    draw16imgs(X, subTitles=Y)
    plt.show()
    Y = Conv2D(1, 1)(Y)
    return Y


import tensorflow as tf

if __name__ == '__main__':
    patch_size = 32
    X, Y = None, None
    for X, Y in dataGenerator_croix_sur_ile(batch_size=16, patch_size=patch_size):
        break
    print("X.shape:", X.shape)
    print("Y.shape:", Y.shape)

    _X = tf.constant(X)
    _out = Model_interpolation_2D(patch_size=patch_size, dropout=1)(_X)
    print("out.get_shape():", _out.get_shape())

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        out = sess.run(_out)

    print("out.shape:", out.shape)
    draw16imgs(out[:, :, :, 0])

    #get_keras_model(32, dropout= 0.2)
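# A hedged numpy-only sketch of what the final Conv2D(1, 1) above amounts to: a 1x1
# convolution is a per-pixel linear combination of the input channels (plus a bias),
# producing a single-channel map with the same spatial size. The weights below are
# random placeholders, not the learned kernel.
def _sketch_1x1_conv():
    import numpy as np

    batch, h, w, c_in = 2, 8, 8, 16
    Y = np.random.rand(batch, h, w, c_in)
    kernel = np.random.rand(c_in, 1)      # stands in for the learned 1x1 kernel
    bias = 0.1                            # stands in for the learned bias

    out = Y.reshape(-1, c_in) @ kernel    # mix channels independently at every pixel
    out = out.reshape(batch, h, w, 1) + bias
    print(out.shape)                      # (2, 8, 8, 1): same spatial size, one channel
    return out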
def taste():
    logDir = "../log"
    reset = True
    if reset and tf.gfile.Exists(logDir):
        tf.gfile.DeleteRecursively(logDir)

    img_size = 28
    nbStrates = 6

    """Note: Y has rank 4; each stratum represents one category = one instance or the background."""
    dataDealer = lambda batchSize: oneBatch_of_rect_stratify(
        either__any_disjoint_separated=0,
        img_size=img_size,
        batchSize=batchSize,  # was hard-coded to 16, which ignored the requested batch size
        nbRectPerImg=2,
        deltaRange=(8, 12),
        nbStrates=nbStrates)

    """ img """
    nbChannel = 1
    model = Model_G(img_size, img_size, nbChannel, nbStrates, 2, 32)
    #model=Model_fullyConv_regCat(img_size,img_size,nbChannel,nbStrates,None,30,60)
    model.verbose = True
    model.nbConsecutiveOptForOneFit = 1
    batchSize = 50
    model.learning_rate = 1e-2

    now = time.time()
    summary_writer_train = tf.summary.FileWriter(logDir + "/" + str(now), model.sess.graph)

    nbStep = 2000
    lossTrain = []
    lossValid = []

    try:
        for itr in range(nbStep):
            print("itr:", itr)
            X, Y_cat, Y_background = dataDealer(batchSize)
            model.fit(X, Y_cat, Y_background, itr)
            lossTrain.append(model.loss)
            summary_writer_train.add_summary(model.summary, global_step=itr)

            if itr % 20 == 0:
                print("\nVALIDATION")
                X, Y_cat, Y_background = dataDealer(batchSize)
                model.validate(X, Y_cat, Y_background, itr)
                lossValid.append(model.loss)
                print("\n")
            else:
                # placeholder (was the training loss) so the validation curve stays aligned
                lossValid.append(np.nan)
    except KeyboardInterrupt:
        print("stopped by user")

    plt.subplot(1, 2, 2)
    plt.plot(lossTrain, label="train")
    plt.plot(lossValid, '.', label="valid class")
    plt.title("loss")
    plt.legend()

    """ TEST"""
    X, Y_cat, Y_background = dataDealer(16)
    hat_Y_cat, hat_Y_cat_sum = model.predict(X)
    print("hat_Y_cat.shape", hat_Y_cat.shape)

    draw16imgs(X, "X", vmin=0, vmax=1, cmap="gray")
    #draw16imgs(hat_Y_background)
    draw16imgs(hat_Y_cat_sum, "hat cat", 0, nbStrates, "jet")
    draw16imgs(np.sum(Y_cat, axis=3), "Y_cat", 0, nbStrates, "jet")

    # draw_proba_Vs_cat=1
    # if draw_proba_Vs_cat==0:
    #     for i in range(0,nbCat):
    #         drawOne(hat_Y_test_proba[:, :, :,i],"proba cat:"+str(i-1),1)
    # else:
    #     drawOne(hat_Y_test_cat[:, :, :], "hat_cat", nbCat)

    plt.show()
    model.close()
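# A small illustrative sketch (assumed shapes, not tied to Model_G) of the rank-4 stratified
# label tensor used above: Y_cat has shape (batch, H, W, nbStrates), each stratum being a
# binary mask for one instance or the background, and np.sum(Y_cat, axis=3) collapses the
# strata into a single displayable map, as done for draw16imgs above.
def _sketch_strata_collapse():
    import numpy as np

    batch, H, W, nbStrates = 4, 28, 28, 6
    Y_cat = np.zeros((batch, H, W, nbStrates))
    Y_cat[:, 5:12, 5:12, 1] = 1            # a fake rectangle in stratum 1
    Y_cat[:, 10:20, 14:22, 2] = 1          # another fake rectangle in stratum 2

    collapsed = np.sum(Y_cat, axis=3)      # shape (batch, H, W); overlapping strata add up
    print(collapsed.shape, collapsed.max())
    return collapsed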
    # additive Gaussian noise on the input images
    noise_gauss = np.random.normal(loc=0, scale=0.2, size=[batchSize, img_size, img_size, 1])
    #noise_salt= (np.random.random(size=[batchSize, img_size, img_size,1])>0.2)
    Xs += noise_gauss
    #Xs[noise_salt]=0

    return Xs, Ys_strate, Ys_background


if __name__ == "__main__":
    nbStrates = 10
    Xs, Ys_strate, Ys_background = oneBatch_of_rect_stratify(
        either__any_disjoint_separated=0,
        img_size=28,
        batchSize=16,
        nbRectPerImg=1,
        deltaRange=(7, 18),
        nbStrates=nbStrates)

    draw16imgs(Xs, cmap="gray")
    draw16imgs(np.sum(Ys_strate, axis=3))
    draw16imgs(np.argmax(Ys_background, axis=3))
    plt.show()
def tastetRot():
    X = np.arange(100).reshape((10, 10))
    X_rot = ndi.interpolation.rotate(X, angle=45, reshape=False)
    X_zoom = ndi.zoom(X, zoom=2)
    print(X_rot.shape)
    print(X_zoom.shape)
    plt.imshow(X_rot)


if __name__ == "__main__":
    xs = []
    ys = []
    for x, y in oneEpoch(50, False):
        xs.append(x)
        ys.append(y)
    xs = np.array(xs)
    ys = np.array(ys)
    print(xs.shape)
    print(ys.shape)

    decal = 60
    draw16imgs(xs[decal:], cmap="gray")
    draw16imgs(np.expand_dims(ys[decal:], 3), vmin=0, vmax=3)
    plt.show()
    for i in range(batch_size):
        img, gt = createImgOneVignette()
        imgs[i, :, :, 0] = img
        gts[i, :, :] = gt
    return imgs, gts


if __name__ == "__main__":
    X, Y = batchOneVignette(16)
    draw16imgs(X, "X", cmap="gray")
    draw16imgs(Y, "Y")
    plt.show()