def get_dataset():
    """Load the image dataset, using an on-disk pickle cache when present.

    Returns:
        tuple: ``(data, target)`` — ``data`` is an ndarray built from the raw
        image files, ``target`` the label array derived from the mask images.
    """
    # BUG FIX: original had a stray ')' after the string literal
    # ("...pkl.gz')") which made this line a SyntaxError.
    # NOTE(review): the name suggests gzip, but the cache is read/written with
    # plain open() + pickle (no gzip) — confirm intended format before renaming.
    cache_file = 'dataset_cache.pkl.gz'

    if os.path.exists(cache_file):
        # Cache hit: deserialize and return immediately.
        with open(cache_file, 'rb') as f:
            dataset = pickle.load(f)
        return dataset['data'], dataset['target']

    # Cache miss: build the data array from the raw image files...
    raw_dataset = load_raw_images()
    data = load_image_files(raw_dataset.filenames)
    data = np.array(list(data))

    # ...and the labels from the mask image files.
    mask_dataset = load_mask_images()
    masks = load_image_files(mask_dataset.filenames)
    target = convert_masks_to_target(masks, negative=True)

    # Persist for the next call.
    with open(cache_file, 'wb') as f:
        pickle.dump({'data': data, 'target': target}, f)
    return data, target
# Script: batch-resize every raw image and mask image on disk, in place,
# to a common target shape taken from the command line.
import argparse  # BUG FIX: argparse.ArgumentParser is used below but was never imported
import time
import skimage.io as io
from skimage.transform import resize
from dip.load_data import load_raw_images, load_mask_images

parser = argparse.ArgumentParser()
parser.add_argument('--shape0', type=int, default=1424)
parser.add_argument('--shape1', type=int, default=2136)
args = parser.parse_args()
shape = (args.shape0, args.shape1)

# Resize the raw images, overwriting each source file in place.
raw_dataset = load_raw_images()
for i, f in enumerate(raw_dataset.filenames):
    print(f)
    img = io.imread(f)
    if i == 0:
        # Report the transformation once; all images share the target shape.
        print('resize: {0} -> {1}'.format(img.shape[:2], shape))
    resized = resize(img, output_shape=shape)
    io.imsave(f, resized)

# Same in-place pass over the mask images.
mask_dataset = load_mask_images()
for i, f in enumerate(mask_dataset.filenames):
    print(f)
    img = io.imread(f)
    if i == 0:
        print('resize: {0} -> {1}'.format(img.shape[:2], shape))
    # NOTE(review): the visible source truncated here, right after the print;
    # mirroring the raw-image loop (resize + imsave) is the evident intent —
    # confirm against the original file.
    resized = resize(img, output_shape=shape)
    io.imsave(f, resized)
stream=sys.stdout) import cPickle as pickle import gzip import os import numpy as np from sknn import ae, mlp from sklearn.preprocessing import normalize from sklearn.cross_validation import train_test_split from skimage.transform import resize from dip.load_data import load_raw_images, load_image_files datasets = load_raw_images() filenames = datasets.filenames filenames_train, filenames_test = train_test_split(filenames) batch_size = 30 N = len(filenames) n_param = 5 n_iter = 10 myae = ae.AutoEncoder( layers=[ ae.Layer('Tanh', units=128),