Example #1
def make_tensors(samples_dir, tensor_size=112):
    import pickle
    num_plot = 32
    # load samples
    listFilesInDir = os.listdir(samples_dir)
    for files in listFilesInDir:
        if files.endswith(".sp"):  # find samples file
            file_path = os.path.join(samples_dir, files)
            _, tempfilename = os.path.split(file_path)
            filename, _ = os.path.splitext(tempfilename)
            samples_category = filename.split(" ")[-1]
            # read
            patches_tensor, labels_tensor, label_note_list = pickle.load(
                open(file_path, mode='rb'))
            # plot samples
            mask = helper.plot_images(
                num_plot,
                patches_tensor,
                labels_tensor,
                labels_note=label_note_list,
                data_scale=1,
                img_aspect=1,
                Square_sampling=False,
                fig_title="Raw samples - {}".format(samples_category))

            #------- make learning tensors ------#

            learning_tensor = np.zeros(
                (len(patches_tensor), tensor_size, tensor_size))
            print("\n Making tensor data set ... : {}".format(
                learning_tensor.shape))
            for i in tqdm(np.arange(len(patches_tensor))):
                learning_tensor[i, :] = helper.Square_Sampling(
                    patches_tensor[i, :],
                    square_size=tensor_size)  # resampling
            # reshape to learning dataset
            learning_tensor = learning_tensor.reshape(-1, tensor_size,
                                                      tensor_size, 1)
            # plot learning tensors
            helper.plot_images(
                num_plot,
                learning_tensor,
                labels_tensor,
                labels_note=label_note_list,
                data_scale=1,
                img_aspect=1,
                Square_sampling=False,
                fig_title="Learning Tensors - {}".format(samples_category),
                mask=mask)

            #----------- save tensors -----------#
            print("\nSaving learning tensor from: {}".format(filename))
            save_tensor_name = "{} tensor{} - {}.ds".format(
                samples_category, tensor_size, filename)
            pickle.dump((learning_tensor, labels_tensor),
                        open(os.path.join(samples_dir, save_tensor_name),
                             'wb'),
                        protocol=4)  # train set
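A minimal driver for the routine above, assuming it is in scope together with its os, numpy, tqdm, and helper dependencies; the directory path is a hypothetical placeholder and must already contain the pickled "*.sp" sample files:

# Hypothetical usage of make_tensors(); the path is illustrative only.
samples_dir = "./samples"   # directory holding the "*.sp" sample files
make_tensors(samples_dir, tensor_size=112)  # writes a "<category> tensor112 - <name>.ds" file per sample file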
Example #2
def detect_edges(filename, **kwargs):
    debug__('[INFO] Detecting edges in %s' % filename)
    original = cv2.imread(filename)
    img = cv2.imread(filename, 0)
    edges = all_edges(img, **kwargs)
    outfile = kwargs.get('outfile', '%s_edges.png' % filename)
    debug__('Wrote edges to %s' % outfile)
    helper.plot_images({'original': original, 'edges': edges}, outfile=outfile)
    return edges
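A short usage sketch, assuming the module that defines this function also provides all_edges, debug__, and helper; the file name is hypothetical:

# Hypothetical input file; by default the plot goes to "<filename>_edges.png".
edges = detect_edges("sample_image.png")
print(edges.shape)   # edge map returned by all_edges()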
Example #3
def contours(img, **kwargs):
    # pop 'debug' so it is not passed to all_edges() a second time below
    debugLevel = kwargs.pop('debug', -1)
    edges = edge_detector.all_edges(img, debug=0, **kwargs)
    cnts, hierarchy = cv2.findContours(edges, cv2.RETR_EXTERNAL,
                                       cv2.CHAIN_APPROX_TC89_KCOS)
    if debugLevel > 0:
        cntImg = np.zeros(img.shape)
        cv2.drawContours(cntImg, cnts, -1, 255)
        helper.plot_images({'original': img, 'contours': cntImg})
    return cnts
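A hedged usage sketch for the contour helper above, assuming edge_detector, helper, and numpy are available in the surrounding module; the image path is hypothetical:

import cv2

img = cv2.imread("page_scan.png", 0)   # hypothetical grayscale input
cnts = contours(img, debug=1)          # debug > 0 also plots the detected contours
print("found %d external contours" % len(cnts))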
Example #4
def contours(img, **kwargs):
    # pop 'debug' so it is not passed to all_edges() a second time below
    debugLevel = kwargs.pop('debug', -1)
    edges = edge_detector.all_edges(img, debug=0, **kwargs)
    cnts, hierarchy = cv2.findContours(edges, cv2.RETR_EXTERNAL,
                                       cv2.CHAIN_APPROX_TC89_KCOS)
    if debugLevel > 0:
        cntImg = np.zeros(img.shape)
        cv2.drawContours(cntImg, cnts, -1, 255)
        helper.plot_images({'original': img, 'contours': cntImg})
    return cnts
Example #5
def detect_edges(filename, **kwargs):
    debug__('[INFO] Detecting edges in %s' % filename)
    original = cv2.imread(filename)
    img = cv2.imread(filename, 0)
    edges = all_edges(img, **kwargs)
    outfile = kwargs.get('outfile', '%s_edges.png' % filename)
    debug__('Wrote edges to %s' % outfile)
    helper.plot_images({'original': original, 'edges': edges},
                       outfile=outfile)
    return edges
Example #6
def plot_images_comparison(idx):
    helper.plot_images(images=data.test.images[idx, :],
                       cls_true=data.test.cls[idx],
                       ensemble_cls_pred=ensemble_cls_pred[idx],
                       best_cls_pred=best_net_cls_pred[idx])
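This helper is typically fed the indices of misclassified test images. A minimal sketch, assuming data, ensemble_cls_pred, and best_net_cls_pred are populated as in the script of Example #7 and numpy is imported as np:

# Indices where the ensemble prediction disagrees with the true class
incorrect = np.where(ensemble_cls_pred != data.test.cls)[0]
plot_images_comparison(idx=incorrect[:9])   # compare ensemble vs. best single network on a few mistakes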
Example #7
print("Size of:")
print("- Training-set:\t\t{}".format(len(data.train.labels)))
print("- Test-set:\t\t{}".format(len(data.test.labels)))
print("- Validation-set:\t{}".format(len(data.validation.labels)))

data.test.cls = np.argmax(data.test.labels, axis=1)
data.validation.cls = np.argmax(data.validation.labels, axis=1)

# DATA DIMENSION #
img_size = 28
img_size_flat = img_size * img_size
img_shape = (img_size, img_size)
num_channels = 1
num_classes = 10

helper.plot_images(data.test.images[:16], 4, img_shape, data.test.cls[:16])

# TENSORFLOW GRAPH #
# Placeholder Variables
x = tf.placeholder(tf.float32, shape=[None, img_size_flat], name='x')
x_image = tf.reshape(x, [-1, img_size, img_size, num_channels])
y_true = tf.placeholder(tf.float32, shape=[None, num_classes], name='y_true')
y_true_cls = tf.argmax(y_true, axis=1)

# Neural Network
x_pretty = pt.wrap(x_image)

with pt.defaults_scope(activation_fn=tf.nn.relu):
    y_pred, loss = x_pretty.\
        conv2d(kernel=5, depth=16, name='layer_conv_1').\
        max_pool(kernel=2, stride=2).\
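The layer chain above continues beyond this excerpt. As a rough sketch only, a complete definition in the same PrettyTensor style usually adds a second convolution block, a flatten step, a dense layer, and a softmax classifier; the depth, size, and layer names past the first max_pool are assumptions, not values from the original:

# Sketch of a full chain in the same style; everything after the first
# max_pool below is assumed rather than recovered from the original code.
with pt.defaults_scope(activation_fn=tf.nn.relu):
    y_pred, loss = x_pretty.\
        conv2d(kernel=5, depth=16, name='layer_conv_1').\
        max_pool(kernel=2, stride=2).\
        conv2d(kernel=5, depth=36, name='layer_conv_2').\
        max_pool(kernel=2, stride=2).\
        flatten().\
        fully_connected(size=128, name='layer_fc_1').\
        softmax_classifier(num_classes, labels=y_true)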
Example #8
import numpy as np
import matplotlib
from matplotlib import pyplot as plt
from keras.models import Sequential
from keras.models import load_model

from helper import get_class_names, get_train_data, get_test_data, plot_images, plot_model

images_test, labels_test, class_test = get_test_data()
class_name = get_class_names()
model = load_model("best_model_simple.h5")
pred = model.predict(images_test, batch_size=32)
# print(pred[0])

exp_label = np.argmax(pred, axis=1)  # predicted class index for each test image
# print(exp_label)

correct = (labels_test == exp_label)
incorrect = (labels_test != exp_label)

mis_images = images_test[incorrect]      # images the model got wrong
mis_labels = exp_label[incorrect]        # the (wrong) predicted labels for those images
correct_labels = labels_test[incorrect]  # their true labels

plot_images(images=images_test[0:16],
            labels_true=labels_test[0:16],
            class_names=class_name,
            labels_pred=exp_label[0:16])
# print(sum(correct)/len(images_test))
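The misclassified-image arrays computed above are not used further in this snippet; a short follow-up sketch that reports accuracy and plots only the mistakes, reusing the same plot_images signature, could look like this:

# Overall test accuracy (same ratio as the commented-out print above)
accuracy = np.mean(correct)
print("Test accuracy: {:.2%}".format(accuracy))

# Show a few misclassified images with their predicted and true labels
plot_images(images=mis_images[0:16],
            labels_true=correct_labels[0:16],
            class_names=class_name,
            labels_pred=mis_labels[0:16])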
Example #9
def generate_samples(num_samples, patch_height, samples_category,
                     tag_type_list, corrupt_list, samples_save_dir):
    patches_list = []
    labels_list = []

    # generate patches
    for tag_type in tag_type_list:
        patch_list, patch_labels = generate_patch(num_samples * tag_type[0],
                                                  type=tag_type[1],
                                                  patch_size=patch_height)
        # merge patches
        patches_list.append(patch_list)
        labels_list.append(patch_labels)

    # make samples
    print("\n\n Processing {} samples, patch size: {} {}".format(
        num_samples, patch_height, " for {}".format(samples_category)))
    patches_tensor, labels_tensor, label_note_list = make_samples(
        patches_list,
        labels_list,
        corrupt_list,
        shuffle=True,
        seismic_standardize=True)

    #----------- plot samples -----------#
    num_image = 32
    helper.plot_images(num_image,
                       patches_tensor,
                       labels_tensor,
                       labels_note=label_note_list,
                       fig_title="Raw samples - {}".format(samples_category))

    #----------- save samples -----------#
    import time
    import pickle
    print("\nSaving samples ...")
    pickle.dump((patches_tensor, labels_tensor, label_note_list),
                open(
                    os.path.join(
                        samples_save_dir,
                        "Patches_samples _n{}s{} {} {}.sp".format(
                            num_samples, patch_height,
                            time.strftime("%Y%m%d%H%M"), samples_category)),
                    'wb'),
                protocol=4)  # save samples
    # note for samples
    note_file = os.path.join(
        samples_save_dir, "Note for samples _n{}s{} {}, {}.txt".format(
            num_samples, patch_height, time.strftime("%Y%m%d%H%M"),
            samples_category))
    with open(note_file, "w") as log:
        log.write("{} note for samples \n\n".format(
            time.strftime("%Y%m%d%H%M")))
        log.write("Tag type:\n")
        for tag_type in tag_type_list:
            log.write(" {}: {}\n".format(tag_type[1], tag_type[0]))
        log.write(
            "\nNumber of samples: {}; patch height (size): {}\n\n".format(
                num_samples, patch_height))
        log.write("\ncorrupt info.:\n\n")
        for aug in corrupt_list:
            log.write(" {}\n\n".format(str(aug)))