Example #1
# Specify which model is used
model_import_path = os.path.join(
    '..', '04-conv_net', 'model_export', 'dataset_mix',
    '2019-08-13_22-25-12_1_3_train_samples_fiji_and_mathematica_segmentations')

# Specify the standardization mode
standardization_mode = 'per_sample'

# Specify the linear output scaling factor (check this value!)
linear_output_scaling_factor = 1  # alternative values: 1e11, 409600000000

# Specify if the results are saved
save_results = False

#%%############################################################################
# Initialize the CNN
###############################################################################
cnn = CNN(linear_output_scaling_factor=linear_output_scaling_factor,
          standardization_mode=standardization_mode)
cnn.load_model_json(model_import_path, 'model_json', 'best_weights')

#%%############################################################################
# Predict the density-map
###############################################################################
table = []
table.append([
    'Category', 'Spheroid',
    'Ground-Truth number of cells (cell-volumes > 3um^3)',
    'Number of cells (predicted)', 'Absolute difference',
    'Percentual difference'
])
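
# Hedged sketch (not from the original script): how one result row for the
# table above could be assembled. `gt_num_of_cells` and `predicted_num_of_cells`
# are hypothetical names; the percentage column is assumed to be the absolute
# difference relative to the ground truth.
def make_result_row(category, spheroid, gt_num_of_cells, predicted_num_of_cells):
    abs_diff = abs(gt_num_of_cells - predicted_num_of_cells)
    pct_diff = (100.0 * abs_diff / gt_num_of_cells
                if gt_num_of_cells else float('nan'))
    return [category, spheroid, gt_num_of_cells, predicted_num_of_cells,
            abs_diff, pct_diff]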

subdirs1 = get_immediate_subdirectories(path_to_data)
for subdir1 in subdirs1:
    #category = 'NPC1'
    #spheroid_name = 'C3-7.nrrd'
    #path_to_spheroid = os.path.join('..', '..', '..', 'Datensaetze', 'Aufnahmen_und_Segmentierungen', 'Datensatz2', category, spheroid_name)

    category = 'none'
    spheroid_name = 'X_scaled.nrrd'
    path_to_spheroid = os.path.join('..', '..', '..', 'Datensaetze',
                                    'OpenSegSPIM_Beispieldaten', 'Neurosphere',
                                    spheroid_name)

    #path_to_spheroid = os.path.join('Skalierung', 'NPC1', 'C3-2-1_1-3_upper.nrrd')

#%%############################################################################
# Initialize the CNN
###############################################################################
cnn = CNN(linear_output_scaling_factor=linear_output_scaling_factor,
          standardization_mode=standardization_mode)
cnn.load_model_json(model_import_path, 'model_json', 'best_weights')

#%%############################################################################
# Predict the density-map
###############################################################################
spheroid_new, density_map, num_of_cells = cnn.predict_density_map(
    path_to_spheroid=path_to_spheroid,
    patch_sizes=patch_sizes,
    strides=strides,
    border=cut_border,
    padding=padding)
# Show the central slice of the processed input volume and the predicted density map
plt.figure()
plt.imshow(spheroid_new[spheroid_new.shape[0] // 2])
plt.figure()
plt.imshow(density_map[density_map.shape[0] // 2])
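
# Hedged sketch (not from the original script): in density-map based counting
# the cell number is usually the integral (sum) of the density map, so the sum
# can serve as a rough cross-check of the value returned above. Any internal
# scaling done by CNN.predict_density_map is not accounted for here.
print('num_of_cells (returned by the CNN):', num_of_cells)
print('sum over the density map (approx.):', float(density_map.sum()))
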
# Specify which model is used
model_import_path = os.path.join(
    '..', '04-conv_net', 'model_export', 'dataset1',
    '2019-08-10_09-13-46_1_3_train_samples_fiji_SEGMENTATIONS_crossentropy_256_epochs')

# Specify the standardization mode
standardization_mode = 'per_sample'

# Specify the linear output scaling factor (check this value!)
linear_output_scaling_factor = 1  # alternative values: 1e11, 409600000000

colocalization_threshold = 10.0

#%%############################################################################
# Initialize the CNN
###############################################################################
cnn = CNN(linear_output_scaling_factor=linear_output_scaling_factor, 
          standardization_mode=standardization_mode)
cnn.load_model_json(model_import_path, 'model_json', 'best_weights')

table = []
table.append([
    'Cultivation-period', 'Spheroid',
    'Ground-Truth number of cells (cell-volumes > 3um^3)',
    'Number of cells (predicted)', 'Absolute difference',
    'Percentual difference', 'Number of colocalized cells',
    'Percentual number of colocalized cells'
])
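
# Hedged sketch (not from the original script): one possible way to persist the
# collected rows of `table` once the loop below has finished, using the csv
# module; the original export mechanism is not shown in this snippet.
def export_table(table_rows, csv_path):
    import csv
    with open(csv_path, 'w', newline='') as csv_file:
        csv.writer(csv_file, delimiter=';').writerows(table_rows)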

for directory in os.listdir(path_to_data):
    data_dir = os.path.join(path_to_data, directory, 'untreated')
    if os.path.exists(data_dir):
        for filename in os.listdir(data_dir):
            if filename.endswith('.nrrd'):
                spheroid_name = os.path.splitext(filename)[0]
                #print('Current file: ', spheroid_name)
                for subdir in os.listdir(data_dir):
                    res_dir = os.path.join(data_dir, subdir)
                    if os.path.isdir(res_dir):
Example #4
#%%############################################################################
# Plot the dataset distributions
###############################################################################
if plt_hist:
    plot_dataset_histogram(path_to_dataset=path_to_dataset,
                           data_list=train_list)
    plot_dataset_histogram(path_to_dataset=path_to_dataset, data_list=val_list)
    plot_dataset_histogram(path_to_dataset=path_to_dataset,
                           data_list=test_list)
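
# Hedged sketch (not from the original project code): a minimal stand-in that
# illustrates the kind of distribution plot produced above, assuming each
# sample can be reduced to a single scalar such as its cell count.
def plot_counts_histogram(counts, bins=20):
    import matplotlib.pyplot as plt
    plt.figure()
    plt.hist(counts, bins=bins)
    plt.xlabel('Cells per sample')
    plt.ylabel('Number of samples')
    plt.show()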

#%%############################################################################
# Define the model
###############################################################################

cnn = CNN(linear_output_scaling_factor=linear_output_scaling_factor,
          standardization_mode=standardization_mode)

cnn.define_model(input_shape=input_shape,
                 filters_exp=filters_exp,
                 kernel_size=kernel_size,
                 pool_size=pool_size,
                 hidden_layer_activation=hidden_layer_activation,
                 output_layer_activation=output_layer_activation,
                 padding=padding)

#cnn.define_unet(input_shape=input_shape, n_filters=8, kernel_size=3,
#                  batchnorm=batchnorm, hidden_layer_activation=hidden_layer_activation,
#                  output_layer_activation=None, pool_size=2, padding=padding)

#cnn.define_unet(input_shape=input_shape, n_filters=n_filters, kernel_size=kernel_size, kernel_initializer=kernel_initializer,
#                  pool_size=pool_size, hidden_layer_activation=hidden_layer_activation, alpha=alpha, batchnorm=batchnorm,
Example #5
    plot_dataset_histogram(path_to_dataset=path_to_dataset,
                           data_list=train_files,
                           hist_export_file=os.path.join(
                               model_export_path, 'train_hist.png'))
    plot_dataset_histogram(path_to_dataset=path_to_dataset,
                           data_list=val_files,
                           hist_export_file=os.path.join(
                               model_export_path, 'val_hist.png'))
    plot_dataset_histogram(path_to_dataset=path_to_dataset,
                           data_list=test_files,
                           hist_export_file=os.path.join(
                               model_export_path, 'test_hist.png'))

###############################################################################
# Define the model
###############################################################################
cnn = CNN(linear_output_scaling_factor=linear_output_scaling_factor,
          standardization_mode=standardization_mode)
cnn.define_unet(input_shape=input_shape,
                n_filters=n_filters,
                kernel_size=kernel_size,
                pool_size=pool_size,
                kernel_initializer=kernel_initializer,
                hidden_layer_activation=hidden_layer_activation,
                alpha=alpha,
                batchnorm_encoder=batchnorm_encoder,
                batchnorm_decoder=batchnorm_decoder,
                regularization_rate=regularization_rate,
                dropout_rate=dropout_rate,
                output_layer_activation=output_layer_activation,
                upsampling_method=upsampling_method,
                padding=padding)
    train_spheroids=test_spheroids,
    val_spheroids=test_spheroids,
    test_spheroids=test_spheroids)

# Load unstandardized test data
X_test_data, y_test_data = datatools.load_data(path_to_dataset=path_to_dataset,
                                               data_list=test_files,
                                               input_shape=data_shape,
                                               standardization_mode=None,
                                               border=border)
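
# Hedged sketch (not from the original script): a quick sanity check of the
# loaded test arrays before evaluation, assuming datatools.load_data returns
# NumPy arrays.
print('X_test_data:', X_test_data.shape, X_test_data.dtype)
print('y_test_data:', y_test_data.shape, y_test_data.dtype)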

###############################################################################
# Load the model
###############################################################################
#cnn.load_model_single_file(import_path, 'model_single')
cnn = CNN(linear_output_scaling_factor=linear_output_scaling_factor,
          standardization_mode=standardization_mode)
cnn.load_model_json(model_import_path, 'model_json', weights)

# Evaluate the model on the test data
summary = cnn.compile_model(loss=loss, optimizer=optimizer, metrics=metrics)
test_loss_best_weights = cnn.evaluate_model(X_test=X_test_data,
                                            y_test=y_test_data,
                                            batch_size=batch_size)
print(test_loss_best_weights)

# Export the value of the test-loss
test_loss_export_path = 'test_loss_best_weights.txt'
with open(test_loss_export_path, 'w') as file:
    for value in test_loss_best_weights:
        file.write(str(value) + '\n')
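
# Hedged sketch (not from the original script): reading the exported loss and
# metric values back in, e.g. to compare several runs later on.
def read_test_loss(txt_path):
    with open(txt_path) as txt_file:
        return [float(line) for line in txt_file if line.strip()]
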
Example #7
# Read the data
#path_to_nuclei = os.path.join('test_data', '48h-X-C2-untreated_3.nrrd')
path_to_nuclei = os.path.join('..', '..', '..', 'Daten', '72h', 'untreated',
                              'C2-untreated_4.nrrd')
data, header = nrrd.read(path_to_nuclei)

plt.imshow(data[:, :, 55])
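
# Hedged sketch (not from the original script): quick inspection of the loaded
# volume; nrrd.read returns the image array together with a header dict, and
# 'space directions' is only present if the file stores voxel spacing.
print('volume shape :', data.shape)
print('voxel spacing:', header.get('space directions', 'not stored'))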

# Load the CNN
# !!! Check the linear output scaling factor !!!
linear_output_scaling_factor = 1e12
standardization_mode = 'per_sample'
cnn = CNN(linear_output_scaling_factor=linear_output_scaling_factor,
          standardization_mode=standardization_mode)
import_path = os.path.join(os.getcwd(), 'model_export', '2019-07-19_18-50-39')
cnn.load_model_json(import_path, 'model_json', 'model_weights')

# Generate image patches
# Patch edge lengths and strides (in voxels) for the sliding-window extraction
size_z = patch_slices = 32
size_y = patch_rows = 32
size_x = patch_cols = 32
stride_z = stride_slices = 16
stride_y = stride_rows = 16
stride_x = stride_cols = 16
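
# Hedged sketch (not from the original project code): the number of sliding
# windows per axis for a given patch size and stride, assuming only complete
# windows are kept (impro.gen_patches may pad or behave differently).
def num_windows(dim_size, patch_size, stride):
    return max(0, (dim_size - patch_size) // stride + 1)

# e.g. a 128-voxel axis with 32-voxel patches and stride 16 yields 7 windows:
# num_windows(128, 32, 16) == 7
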
session = tf.Session()
patches = impro.gen_patches(session=session,
                            data=data,
                            patch_slices=size_z,
                            patch_rows=size_y,