Example #1
def main(directory):
    # assumes `import numpy as np` and the project's `data` module
    # (get_image_files, load_augment); `std` is defined elsewhere in this module

    filenames = data.get_image_files(directory)

    # split the file list into batches of at most 1000 images;
    # ceiling division avoids a trailing empty batch
    bs = 1000
    batches = [
        filenames[i * bs:(i + 1) * bs]
        for i in range((len(filenames) + bs - 1) // bs)
    ]

    # compute per-channel mean and std
    std(filenames, bs)

    # estimate the principal components of the RGB channels for PCA colour augmentation
    Us, evs = [], []
    for batch in batches:
        images = np.array([data.load_augment(f, 128, 128) for f in batch])
        # flatten to an (N*H*W, 3) matrix of pixel values
        X = images.transpose(0, 2, 3, 1).reshape(-1, 3)
        # 3x3 second-moment matrix of the colour channels
        cov = np.dot(X.T, X) / X.shape[0]
        U, S, V = np.linalg.svd(cov)
        ev = np.sqrt(S)
        Us.append(U)
        evs.append(ev)

    print('U')
    print(np.mean(Us, axis=0))
    print('eigenvalues')
    print(np.mean(evs, axis=0))
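
The averaged U matrix and eigenvalues printed here are the ingredients of Krizhevsky-style PCA colour augmentation: a random offset along the principal components of the RGB channels is added to every pixel. A minimal sketch of that step, assuming U and ev hold the two averages printed above (pca_color_offset is a hypothetical helper, not part of the project):

import numpy as np

def pca_color_offset(U, ev, sigma=0.1, rng=np.random):
    # one random coefficient per principal component
    alphas = rng.normal(0.0, sigma, size=3)
    # 3-vector offset in RGB space: U @ (coefficients * sqrt(eigenvalues))
    return U.dot(ev * alphas)

# add the same offset to every pixel of a (3, H, W) image
# image = image + pca_color_offset(U, ev)[:, np.newaxis, np.newaxis]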
Example #2
    def __getitem__(self, idx):
        fname = self.input_path + '/' + self.file_list[idx]

        # random augmentation parameters applied by load_augment
        w = 224
        h = 224
        aug_params = {
            'zoom_range': (1 / 1.15, 1.15),
            'rotation_range': (0, 360),
            'shear_range': (0, 0),
            'translation_range': (-20, 20),
            'do_flip': True,
            'allow_stretch': True,
        }
        sigma = 0.25
        image = load_augment(fname,
                             w,
                             h,
                             aug_params=aug_params,
                             transform=None,
                             sigma=sigma,
                             color_vec=None)
        # the label is stored alongside the image in the same HDF5 file
        with h5py.File(fname, 'r') as f:
            target = float(f['target'].value)
        if self.transform is not None:
            image = self.transform(image)
        return image, torch.from_numpy(np.array([target]))
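
This __getitem__ belongs to a PyTorch-style Dataset, so it is normally consumed through a DataLoader. A minimal sketch under that assumption; RetinaDataset, 'train_h5', and file_names are placeholder names for the class that owns the method above and its constructor arguments:

from torch.utils.data import DataLoader

dataset = RetinaDataset(input_path='train_h5', file_list=file_names, transform=None)
loader = DataLoader(dataset, batch_size=32, shuffle=True, num_workers=4)

for images, targets in loader:
    # images: batch of augmented images; targets: float tensors of shape (batch, 1)
    break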
Example #3

def compute_mean_std(files, batch_size=128):
    """Load images in batches and compute the per-channel mean and standard deviation."""
    m = np.zeros(3)   # running sum of pixel values per channel
    s2 = np.zeros(3)  # running sum of squared pixel values per channel
    for i in range(0, len(files), batch_size):
        print("done with {:>3} / {} images".format(i, len(files)))
        images = np.array([data.load_augment(f, 256, 256)
                           for f in files[i:i + batch_size]])
        m += images.sum(axis=(0, 2, 3))
        s2 += np.power(images, 2).sum(axis=(0, 2, 3))
        shape = images.shape
    n = len(files) * shape[2] * shape[3]
    # sample variance from the running sums: (sum of squares - (sum)^2 / n) / (n - 1)
    var = (s2 - m ** 2.0 / n) / (n - 1)
    return (m / n).astype(np.float32), np.sqrt(var)
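
The returned per-channel statistics are typically used to normalise images before training. A minimal usage sketch, assuming file_names is a placeholder list of image paths readable by data.load_augment:

mean, std = compute_mean_std(file_names, batch_size=128)
print('channel mean:', mean)
print('channel std: ', std)

# e.g. standardise a single (3, H, W) image loaded the same way
img = (data.load_augment(file_names[0], 256, 256) - mean[:, None, None]) / std[:, None, None]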
Example #4
def main(filelist):
    filenames = data.get_files(filelist)

    # batches of at most 1000 files; ceiling division avoids a trailing empty batch
    bs = 1000
    batches = [filenames[i * bs:(i + 1) * bs]
               for i in range((len(filenames) + bs - 1) // bs)]

    # same principal-component estimate as Example #1, here at 256x256
    Us, evs = [], []
    for batch in batches:
        images = np.array([data.load_augment(f, 256, 256) for f in batch])
        X = images.transpose(0, 2, 3, 1).reshape(-1, 3)
        cov = np.dot(X.T, X) / X.shape[0]
        U, S, V = np.linalg.svd(cov)
        ev = np.sqrt(S)
        Us.append(U)
        evs.append(ev)

    print('U')
    print(np.mean(Us, axis=0))
    print('eigenvalues')
    print(np.mean(evs, axis=0))
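
The components estimated here can be fed back into data.load_augment through its color_vec argument (see the call signature in Example #2) to apply the colour jitter while loading. A minimal sketch, assuming U and ev hold the printed averages and 'sample.jpeg' stands in for a real image path:

alphas = np.random.normal(0.0, 0.5, size=3)   # random strength per component
color_vec = U.dot(ev * alphas)                # 3-vector colour offset
img = data.load_augment('sample.jpeg', 256, 256, color_vec=color_vec)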
Example #5

def load_shared(args):
    i, array_name, fname, kwargs = args
    # attach to the shared-memory array created by the parent process and write the augmented image into slot i
    array = SharedArray.attach(array_name)
    array[i] = data.load_augment(fname, **kwargs)
Example #7
def load_shared(args):
    i, array_name, fname, kwargs = args
    np.random.seed(i)  # seed per item so the augmentation is reproducible; comment this line out for a non-reproducible result
    array = SharedArray.attach(array_name)
    array[i] = data.load_augment(fname, **kwargs)
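
load_shared is written to be mapped over a process pool, with every worker writing its augmented image into one slot of a named shared-memory array. A minimal sketch of that pattern, assuming the SharedArray package is installed; file_names, the array name, the image size, and the kwargs passed through to data.load_augment are placeholders and must match load_augment's actual parameter names:

import multiprocessing
import numpy as np
import SharedArray

name = 'shm://augmented_batch'   # placeholder shared-memory name
array = SharedArray.create(name, (len(file_names), 3, 256, 256), dtype=np.float32)

jobs = [(i, name, fname, {'w': 256, 'h': 256}) for i, fname in enumerate(file_names)]
with multiprocessing.Pool(4) as pool:
    pool.map(load_shared, jobs)

batch = np.array(array)          # copy the result out of shared memory
SharedArray.delete(name)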