# Beispiel #1 (Example 1)
def load_train_data(filepath, split=split, random_state=random_state):
    """Load the training set and carve out a stratified validation split.

    Args:
        filepath: Path passed through to ``load_train``.
        split: Fraction of samples kept for training (module-level default).
        random_state: Seed for the stratified shuffle (module-level default).

    Returns:
        tuple: ``(features_train, train_img, train_target),
        (features_val, val_img, val_target)``.
    """
    # Renamed from `id` to avoid shadowing the builtin of the same name.
    sample_ids, features_train, target_train = load_train(filepath)
    image_train = load_image(sample_ids)

    # One stratified split preserves the class distribution in both parts.
    sss = StratifiedShuffleSplit(n_splits=1,
                                 train_size=split,
                                 random_state=random_state)
    train_i, val_i = next(sss.split(features_train, target_train))

    # Index the validation rows first: features_train is rebound below.
    features_val = features_train[val_i]
    val_img = image_train[val_i]
    val_target = target_train[val_i]

    features_train = features_train[train_i]
    train_img = image_train[train_i]
    train_target = target_train[train_i]

    return (features_train, train_img, train_target), (features_val, val_img,
                                                       val_target)
# Beispiel #2 (Example 2)
def main():
    """Train a convolutional autoencoder on a directory of images.

    Loads (or builds and caches) the image dataset as ``.npy`` files,
    restores the training counter/epoch from a checkpoint JSON when present,
    then compiles and fits the model with best-model checkpointing and a
    sampling callback.
    """
    args = parser.parse_args()
    # Dataset: reuse cached arrays, or build them from the image directory
    # and cache for the next run.  `data_load` is a string flag from argparse.
    if args.data_load == 'True':
        images = np.load('images.npy')
        labels = np.load('labels.npy')
    elif args.data_load == 'False':
        images, labels = load_image(args.image_dir, args.n_process, args.shape)
        np.save('images.npy', images)
        np.save('labels.npy', labels)
    else:
        # Korean: "please check the data-load argument"
        print('Data load 인자 확인요망')
        return
    print('Dataset Shape :', images.shape)

    # Restore training progress left behind by a previous run, if any.
    counter = 1
    epoch = 1
    counter_path = os.path.join(CHECKPOINT_DIR, args.run_name, 'counter.json')
    if os.path.exists(counter_path):
        with open(counter_path, 'r') as f:
            ce = json.load(f)
            counter = ce['counter']
            epoch = ce['epoch']
    # NOTE(review): `counter`/`epoch` are restored but never read below —
    # presumably consumed by SampleAndReconstruct or a planned resume
    # feature; confirm before removing.

    model_path = os.path.join(CHECKPOINT_DIR, args.run_name, 'best_model.h5')
    mckpt = ModelCheckpoint(filepath=model_path,
                            monitor='val_loss',
                            verbose=1,
                            save_best_only=True)

    # Grayscale input: append a single channel axis to the spatial shape.
    input_image = Input(shape=args.shape + (1, ))
    c_ae = model(args, input_image)
    c_ae.compile(optimizer=Adam(lr=args.learning_rate),
                 loss='binary_crossentropy')
    c_ae.summary()
    # images = images / 255.
    # Autoencoder: inputs double as their own reconstruction targets.
    c_ae.fit(images,
             images,
             batch_size=args.batch_size,
             epochs=args.epochs,
             shuffle=True,
             validation_split=0.2,
             callbacks=[mckpt, SampleAndReconstruct(args)])
# Beispiel #3 (Example 3)
def load_test_data(filepath):
    """Load test ids, tabular features, and the matching images.

    Args:
        filepath: Path passed through to ``load_test``.

    Returns:
        tuple: ``(ids, test_features, test_images)``.
    """
    # Renamed from `id` to avoid shadowing the builtin of the same name.
    ids, test_features = load_test(filepath)
    test_images = load_image(ids)

    return (ids, test_features, test_images)
def main():
    """Load image set 110 and run the paper-style normal-vector edge
    detector on its first image."""
    loaded = load_image(110)
    first_image = loaded[0]
    find_normal_vector_edges_like_paper(first_image)
# Beispiel #5 (Example 5)
import numpy as np
import load_images


def calculate_possible_values(image):
    missing_values = np.ma.masked_equal(0, image)
    print(missing_values)
    quit()


if __name__ == '__main__':
    # Script entry point: load image index 0 and mask its zero (missing)
    # values via calculate_possible_values.
    image = load_images.load_image(0)
    calculate_possible_values(image)
    quit()  # explicit interpreter exit; redundant at the end of the script