#import re
import os
from time import time
import matplotlib.pyplot as plt
from support import ids_dataset
from support import ids

scan_type = 'B'
#scan_type = 'T'
path_data = '/home/lfabbrini/data'
dataset_dir = 'NN_PNGrgbSScan_bal_m_wD_TgU_wUnkGT_P0e001__NAcq40_Tex4_201831211597/STC'
model_dir = 'mdl_tr80val10te10fs1_0001'
#model_dir = 'mdl_debug'
grayscale = False

filename = scan_type + '_sublist_train_and_test.npz'
filelist = os.path.join(path_data, dataset_dir, model_dir, filename)
(x_train, y_train) = ids_dataset.load_data(filelist)

filename = scan_type + '_sublist_train_and_test_mean.npz'
filelist = os.path.join(path_data, dataset_dir, model_dir, filename)
(x_mu, y_mu) = ids_dataset.load_data(filelist)

filename = scan_type + '_sublist_val.npz'
filelist = os.path.join(path_data, dataset_dir, model_dir, filename)
(x_test, y_test) = ids_dataset.load_data(filelist)

#filename = scan_type+'_sublist_val.npz'
#filelist = os.path.join(path_data,dataset_dir,model_dir,filename)
#(x_val,y_val) = ids_dataset.load_data(filelist)
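
#%% Sketch (assumption): ids_dataset.load_data is not shown in these snippets.
# A minimal .npz loader consistent with the calls above might look like the function
# below; the function name and the archive keys 'x' and 'y' are guesses, not the real API.
import numpy as np

def load_data_npz(filelist):
    """Load a (samples, labels) pair from a .npz archive (hypothetical helper)."""
    with np.load(filelist) as data:
        return data['x'], data['y']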

#%%Debug: show the first training sample
plt.imshow(x_train[0])
plt.show()
model_dir = 'mdl_tr80val10te10fs1_tex1_0001_dz1_dz1'  #downsampling_xyz = [1,1,1]
#model_dir = 'mdl_tr80val10te10fs1_tex1_0002_dz1_dz6' #downsampling_xyz = [1,1,6]
#model_dir = 'mdl_debug'

conv_type = '3D'  #options: '3D', '2D', '2Dsep'
#conv_type='2D'#2D,2Dsep
#conv_type='2Dsep'#2D,2Dsep
stacked_scan = 9
downsampling_xyz = [1, 1, 6]
#data_type is not defined in this snippet; it is assumed to be set earlier (analogous to scan_type above)
filename = data_type + '_sublist_train_mean.hdf5'
#filename = data_type+'_sublist_train_mean_FA.hdf5'
file_to_mean = os.path.join(path_data, dataset_dir, model_dir, filename)

#Debug
hdf5_format = True
(x_mu, y_mu) = ids_dataset.load_data(file_to_mean, hdf5_format)

#%%Preprocessing: to reproduce the PNG performance (note: z has twice as many samples as the .png images in the older dataset)
from functools import partial
#clip data to [-x_sat, x_sat]
#inputs are in [0, 1], representing values in [-10, 10]
#map x_sat from [-10, 10] to [0, 1]
x_sat_h = 0.5
x_sat_l = -0.5
x_sat_h = (x_sat_h + 10) / 20
x_sat_l = (x_sat_l + 10) / 20

clip_ = partial(ids.clip, x_min=x_sat_l, x_max=x_sat_h)
linearmap_ = partial(ids.linearmap,
                     x_min=x_sat_l,
                     x_max=x_sat_h)  #the original call is truncated here; further arguments (if any) are not shown
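
#%% Sketch (assumption): ids.clip and ids.linearmap are not shown in these snippets.
# The reference forms below are only a guess at their behaviour, inferred from the
# keyword arguments used in the partials above; signatures are assumptions.
import numpy as np

def clip(x, x_min, x_max):
    """Saturate x to the interval [x_min, x_max]."""
    return np.clip(x, x_min, x_max)

def linearmap(x, x_min, x_max, y_min=0.0, y_max=1.0):
    """Linearly remap x from [x_min, x_max] to [y_min, y_max]."""
    return (x - x_min) / (x_max - x_min) * (y_max - y_min) + y_min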
#path_data = '/home/lfabbrini/data'
#path_data = '/media/sf_share/'
path_data = 'c:/Users/l.fabbrini/share/'
dataset_dir = 'NN_HDF5vol_0e75m_ext0e30_P0e001__NAcq40_Tex0_2018410112537/STC'
#model_dir = 'mdl_tr70val15te15fs1_tex3_0001'
#model_dir = 'mdl_tr80val10te10fs1_tex1_0001_dz1_dz1' #downsampling_xyz = [1,1,1]
model_dir = 'mdl_tr80val10te10fs1_tex1_0002_dz1_dz6'  #downsampling_xyz = [1,1,6]
#model_dir = 'mdl_debug'
grayscale = False
hdf5_format = True
max_value = 1

filename_base = data_type + '_sublist_test'
filename_h5 = filename_base + '.hdf5'
filelist = os.path.join(path_data, dataset_dir, model_dir, filename_h5)
(x_val, y_val) = ids_dataset.load_data(filelist, hdf5_format)

filename = data_type + '_sublist_train_mean.hdf5'  #the training mean is assumed to follow the same HDF5 naming as the other files in this dataset
filelist = os.path.join(path_data, dataset_dir, model_dir, filename)
(x_mu, y_mu) = ids_dataset.load_data(filelist, hdf5_format)

x_val = (x_val - x_mu) / max_value
#%%Load model
from keras.models import load_model
modelweight_dir = 'C:/Users/l.fabbrini/spyder/hdf5/'
#modelweight_dir = 'C:/Users/l.fabbrini/spyder/png/'
#modelweight_to_load = 'ids_Bscan0e01sgd1521021533.h5'
#modelweight_to_load = 'ids_Bscan0e01sgd1521101393.h5'
#modelweight_to_load = 'ids_Cscan0e001sgd1521723396.h5'
#modelweight_to_load = 'ids_Cscan0e001sgd1521723396-05-0.95.hdf5'
#modelweight_to_load = 'ids_Cscan0e01sgd1521802004-10-0.93.hdf5'
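
# Assumed completion (not in the original snippet): choose one of the checkpoints listed
# above and load it from modelweight_dir; the file picked here is only an illustration.
modelweight_to_load = 'ids_Cscan0e01sgd1521802004-10-0.93.hdf5'
model = load_model(os.path.join(modelweight_dir, modelweight_to_load))
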
import os
#import re
from time import time
import numpy as np
from support import ids_dataset
from support import ids

scan_type = 'B'
path_data = '/home/lfabbrini/data'
dataset_dir = 'NN_PNGrgbSScan_bal_m_wD_TgU_wUnkGT_P0e001__NAcq40_Tex4_201831211597/STC'
model_dir = 'mdl_tr80val10te10fs1_0001'
#model_dir = 'mdl_debug'
grayscale = False

filename = scan_type + '_sublist_train_and_test.npz'
filelist = os.path.join(path_data, dataset_dir, model_dir, filename)
(x_train_B, y_train_B) = ids_dataset.load_data(filelist)

filename = scan_type + '_sublist_val.npz'
filelist = os.path.join(path_data, dataset_dir, model_dir, filename)
(x_test_B, y_test_B) = ids_dataset.load_data(filelist)

scan_type = 'T'
filename = scan_type + '_sublist_train_and_test.npz'
filelist = os.path.join(path_data, dataset_dir, model_dir, filename)
(x_train_T, y_train_T) = ids_dataset.load_data(filelist)

filename = scan_type + '_sublist_val.npz'
filelist = os.path.join(path_data, dataset_dir, model_dir, filename)
(x_test_T, y_test_T) = ids_dataset.load_data(filelist)

x_train = np.concatenate((x_train_B, x_train_T), axis=0)
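# Assumed continuation (truncated in the source): stack the labels to match the inputs.
y_train = np.concatenate((y_train_B, y_train_T), axis=0)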

#%% Example #5
#path_data = '/home/lfabbrini/data'
#path_data = '/media/sf_share/'
path_data = 'c:/Users/l.fabbrini/share/'
dataset_dir = 'NN_HDF5vol_0e75m_ext0e30_P0e001__NAcq40_Tex0_2018410112537/STC'
#model_dir = 'mdl_tr70val15te15fs1_tex3_0001'
#model_dir = 'mdl_tr80val10te10fs1_tex1_0001_dz1_dz1' #downsampling_xyz = [1,1,1]
model_dir = 'mdl_tr80val10te10fs1_tex1_0002_dz1_dz6' #downsampling_xyz = [1,1,6]
#model_dir = 'mdl_debug'
grayscale = False
hdf5_format = True
max_value = 1

filename = data_type + '_sublist_train.hdf5'
filelist = os.path.join(path_data, dataset_dir, model_dir, filename)
(x_train, y_train) = ids_dataset.load_data(filelist, hdf5_format)

filename = data_type + '_sublist_train_mean.hdf5'
#filename = data_type+'_sublist_train_mean_FA.hdf5'
filelist = os.path.join(path_data, dataset_dir, model_dir, filename)
(x_mu, y_mu) = ids_dataset.load_data(filelist, hdf5_format)

filename = data_type + '_sublist_test.hdf5'
filelist = os.path.join(path_data, dataset_dir, model_dir, filename)
(x_test, y_test) = ids_dataset.load_data(filelist, hdf5_format)

filename = data_type + '_sublist_val.hdf5'
filelist = os.path.join(path_data, dataset_dir, model_dir, filename)
(x_val, y_val) = ids_dataset.load_data(filelist, hdf5_format)

#%% Example #6
#scan_type = 'T'
scan_type = 'C'
path_data = '/home/lfabbrini/data'
dataset_dir = 'NN_PNGrgbSScan_bal_m_wD_TgU_wUnkGT_P0e001__NAcq40_Tex4_201831211597/STC'
model_dir = 'mdl_tr80val10te10fs1_0001'
#model_dir = 'mdl_debug'
grayscale = False

filename = scan_type + '_sublist_val.txt'
(x_val_c, y_val_c), (anom_id, acq_id) = ids.get_central_data_from_filelist(
    filename, path_data, dataset_dir, model_dir, grayscale)

filename = scan_type + '_sublist_test.npz'  #test set used to reproduce the TensorBoard performance
#filename = scan_type+'_sublist_val.npz'
filelist = os.path.join(path_data, dataset_dir, model_dir, filename)
(x_val, y_val) = ids_dataset.load_data(filelist)

filename = scan_type + '_sublist_train_mean.npz'
filelist = os.path.join(path_data, dataset_dir, model_dir, filename)
(x_mu, y_mu) = ids_dataset.load_data(filelist)

#subtract the training mean and rescale the 8-bit PNG range
x_val = (x_val - x_mu) / 255
x_val_c = (x_val_c - x_mu) / 255
#%%Load model
from keras.models import load_model
#model_to_load = 'ids_Bscan0e01sgd1521021533.h5'
#model_to_load = 'ids_Bscan0e01sgd1521101393.h5'
#model_to_load = 'ids_Cscan0e001sgd1521723396.h5'
#model_to_load = 'ids_Cscan0e001sgd1521723396-05-0.95.hdf5'
model_to_load = 'ids_Cscan0e01sgd1521802004-10-0.93.hdf5'
model = load_model(model_to_load)
#scan_type = 'B'
#scan_type = 'T'
scan_type = 'C'
path_data = '/home/lfabbrini/data'
dataset_dir = 'NN_PNGrgbSScan_bal_m_wD_TgU_wUnkGT_P0e001__NAcq40_Tex4_201831211597/STC'
model_dir = 'mdl_tr80val10te10fs1_0001'
#model_dir = 'mdl_debug'
grayscale = False

filename = scan_type + '_sublist_val.txt'
(x_val_c, y_val_c), (anom_id, acq_id) = ids.get_central_data_from_filelist(
    filename, path_data, dataset_dir, model_dir, grayscale)

filename = scan_type + '_sublist_train_mean.npz'
filelist = os.path.join(path_data, dataset_dir, model_dir, filename)
(x_mu, y_mu) = ids_dataset.load_data(filelist)

x_val_c = (x_val_c - x_mu) / 255

#%% load image
from keras.preprocessing import image
import re
is_target_id_re = re.compile(r"T18")
#is_target_id_re = re.compile(r"FA21")
img = x_val_c[0]  #default to the first sample if no ID matches below
for i, anom in enumerate(anom_id):
    is_target_id_res = is_target_id_re.search(anom)
    if is_target_id_res is not None:
        img = x_val_c[i]
        print('image {} found!'.format(anom))
        break
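
# Assumed usage (not part of the original snippet): run the previously loaded model on the
# selected sample; np.expand_dims adds the leading batch axis that Keras models expect.
import numpy as np
pred = model.predict(np.expand_dims(img, axis=0))
print(pred)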