Example #1
0
def calc(I):
    """Train a DeePore model of architecture ``I`` and test it.

    Uses the module-level names ``Data_compact``, ``TrainList``,
    ``EvalList`` and ``TestList`` prepared earlier in the script.
    """
    trained = dp.trainmodel(
        Data_compact, TrainList, EvalList,
        retrain=1, epochs=100, batch_size=100, ModelType=I)
    # evaluate the freshly trained model on the held-out test split
    dp.testmodel(trained, Data_compact, TestList, ModelType=I)
Example #2
0
import DeePore as dp

# Part #1    A quick start
# Feed your porous material image to see its properties predicted.
# 1. load the trained model
model = dp.loadmodel()
# 2. read and transform your data into initial feature maps
# here the data is a 400 x 400 x 400 binary MATLAB mat file in which
# 0 indicates void and 1 indicates solid space
A = dp.feedsampledata(FileName="Data/Sample_large.mat")
# 3. show feature maps (optional)
dp.show_feature_maps(A)
# 4. predict properties; res is the spatial resolution of the image in micron/pixel
all_preds = dp.predict(model, A, res=4.8)
# 5. save results into a text file and also print them in the console
dp.prettyresult(all_preds, 'results.txt')

# Part #2    Compatibility
# 1. numpy 3-d arrays load in the same manner
A = dp.feedsampledata(FileName="Data/Sample.npy")
# 2. 2-d images in jpg and png format are welcome too; for a 2-d image the
#    code creates 3 arbitrary mid-slices by flipping the 2-d image
A = dp.feedsampledata(FileName="Data/Sample.jpg")
A = dp.feedsampledata(FileName="Data/Sample.png")
# 3. for images larger than 256 x 256 x 256 the code automatically slides
#    windows over the whole image and reports back averaged predictions
A = dp.feedsampledata(FileName="Data/Sample_large.mat")
# once the data is loaded and transformed into initial feature maps by this
# function, you are good to go and can find its properties as shown above
Example #3
0
import DeePore as dp
# Explore the dataset:
# If you want to open images of the dataset and visualize them
# 1. check or download the complete dataset
# NOTE: forward slashes keep the path portable and avoid the invalid escape
# sequence '\D' that the original 'Data\DeePore_Dataset.h5' relied on
# (a SyntaxWarning in modern Python); the rest of this file already uses '/'
Data_complete = 'Data/DeePore_Dataset.h5'
# Data_complete = '../../../BigData/DeePore/DeePore_Dataset.h5'
dp.check_get(
    'https://zenodo.org/record/4297035/files/DeePore_Dataset.h5?download=1',
    Data_complete)
# 2. read the first image out of 17700
A = dp.readh5slice(Data_complete, 'X', [0])
# 3. show mid-slices of the loaded image
dp.showentry(A)
# 4. show and save the properties of this image (assumed to be the ground
#    truth) as a text file
props = dp.readh5slice(Data_complete, 'Y', [0])
dp.prettyresult(props, 'sample_gt.txt', units='px')


Example #4
0
import DeePore as dp
# Comparing statistics of the training, validation and testing data:
# 1. check or download the compact data
# NOTE: forward slashes keep the path portable and avoid the invalid escape
# sequence '\D' in the original 'Data\DeePore_Compact_Data.h5'
Data_compact = 'Data/DeePore_Compact_Data.h5'
# Data_compact = '../../../BigData/DeePore/DeePore_Compact_Data.h5'
dp.check_get(
    'https://zenodo.org/record/4297035/files/DeePore_Compact_Data.h5?download=1',
    Data_compact)
# 2. prepare the dataset by removing outliers and creating lists of
#    training, validation and test samples
List = dp.prep(Data_compact)
TrainList, EvalList, TestList = dp.splitdata(List)

# 3. read datasets 'Y' (the target properties) into arrays
Data_Eval = dp.readh5slice(Data_compact, 'Y', EvalList)
Data_Train = dp.readh5slice(Data_compact, 'Y', TrainList)
Data_Test = dp.readh5slice(Data_compact, 'Y', TestList)

# exporting to MATLAB for extra postprocessing if you need it
# import scipy.io as sio
# sio.savemat('All_Data.mat',{'train':Data_Train,'eval':Data_Eval,'test':Data_Test})

# 4. plot histograms of one feature across the splits
import matplotlib.pyplot as plt
FN = 5  # feature id number, you can select 0 to 14
h = plt.hist(Data_Eval[:, FN, 0],
             50,
             histtype='step',
             density=True,
             label='validation')
h = plt.hist(Data_Train[:, FN, 0],
             50,
Example #5
0
import DeePore as dp

# Comparing different model architectures:
# 1. check or download the compact data
# NOTE: forward slashes keep the path portable and avoid the invalid escape
# sequence '\D' in the original Windows-style path
Data_compact = 'Data/DeePore_Compact_Data.h5'
# The machine-specific override below is kept commented out, as in the other
# examples in this file; leaving it active would discard the path that
# dp.check_get just verified/downloaded.
# Data_compact = '../../../BigData/DeePore/DeePore_Compact_Data.h5'
dp.check_get(
    'https://zenodo.org/record/4297035/files/DeePore_Compact_Data.h5?download=1',
    Data_compact)
# 2. prepare the dataset by removing outliers and creating lists of
#    training, evaluation and test samples
List = dp.prep(Data_compact)

# 3. shuffle the dataset before splitting
List = dp.shuf(List)

# List=List[1:1000]     #uncomment for a smaller dataset for test purposes
TrainList, EvalList, TestList = dp.splitdata(List)

# 4. defining the training and testing workflows
def calc(I):
    """Train a model of architecture ``I`` on the compact dataset, then test it.

    Relies on the module-level names ``Data_compact`` (HDF5 file path) and
    ``TrainList``/``EvalList``/``TestList`` (index lists from dp.splitdata).
    """
    # retrain=1 presumably forces training from scratch rather than loading
    # saved weights -- TODO confirm against DeePore.trainmodel
    model = dp.trainmodel(Data_compact,
                          TrainList,
                          EvalList,
                          retrain=1,
                          epochs=100,
                          batch_size=100,
                          ModelType=I)
    # evaluate the trained model on the held-out test split
    dp.testmodel(model, Data_compact, TestList, ModelType=I)

Example #6
0
import DeePore as dp
# Retrain and test the model:
# If you want to try your own neural-network architecture or retrain the present one
# 1. check or download the compact data
# NOTE: forward slashes keep the path portable and avoid the invalid escape
# sequence '\D' in the original 'Data\DeePore_Compact_Data.h5'
Data_compact = 'Data/DeePore_Compact_Data.h5'
# Data_compact = '../../../BigData/DeePore/DeePore_Compact_Data.h5'
dp.check_get(
    'https://zenodo.org/record/4297035/files/DeePore_Compact_Data.h5?download=1',
    Data_compact)

# 2. prepare the dataset by removing outliers and creating lists of
#    training, evaluation and test samples
List = dp.prep(Data_compact)
TrainList, EvalList, TestList = dp.splitdata(List)
# 3. retrain the model
# retrain=0 here (unlike retrain=1 elsewhere in this file) -- presumably
# resumes from saved weights; confirm against DeePore.trainmodel
model = dp.trainmodel(Data_compact,
                      TrainList,
                      EvalList,
                      retrain=0,
                      epochs=50,
                      batch_size=100,
                      ModelType=3)
# 4. test the model on the held-out test split
dp.testmodel(model, Data_compact, TestList)