Example #1
    # Load the square-lattice AID dataset from HDF5
    f = h5py.File(data_directory + '/aid' + str(resize_size[0]) + '.h5', 'r')
    data = f['images'][()]
    labels = f['labels'][()]
    class_names = f.attrs['class_names']
    f.close()

    # Normalize data
    data = utils.normalize(data)

    data = data.reshape(data.shape[0], data.shape[1], resize_size[0],
                        resize_size[1])

    # Datasets
    dataset = utils.NumpyDataset(data, labels, transform=utils.NumpyToTensor())

    # Run the experiments
    for seed in seeds:
        # Data loaders
        logger.info('Split data with seed {}'.format(seed))
        torch.manual_seed(seed)
        np.random.seed(seed)
        train_indices = []
        val_indices = []
        for cls in np.unique(labels):
            indices = np.where(labels == cls)
            indices = np.random.permutation(indices[0])
            train_indices.append(
                indices[:int(len(indices) * (1 - validating_ratio))])
            val_indices.append(
                indices[int(len(indices) * (1 - validating_ratio)):])
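        # Hypothetical continuation (a sketch, not the original code): the
        # per-class index lists would typically feed torch.utils.data.Subset
        # and DataLoader (both assumed imported, as in the other examples):
        train_subset = torch.utils.data.Subset(dataset, np.concatenate(train_indices))
        val_subset = torch.utils.data.Subset(dataset, np.concatenate(val_indices))
        train_loader = DataLoader(train_subset, batch_size=batch_size, shuffle=True)
        val_loader = DataLoader(val_subset, batch_size=batch_size, shuffle=False)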
Example #2
    # Whitening data
    logger.info('Computing whitening matrices...')
    train_data_all_flat = train_data_all.reshape(train_data_all.shape[0], -1).T
    test_data_flat = test_data.reshape(test_data.shape[0], -1).T
    pca_all = utils.PCA(D=train_data_all_flat, n_components=train_data_all_flat.shape[1])

    logger.info('Whitening data...')
    train_data_all_flat = pca_all.transform(D=train_data_all_flat, whiten=True, ZCA=True)
    train_data_all = train_data_all_flat.T.reshape(train_data_all.shape[0:2] + (-1,))

    test_data_flat = pca_all.transform(D=test_data_flat, whiten=True, ZCA=True)
    test_data = test_data_flat.T.reshape(test_data.shape[0:2] + (-1,))
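
    # For reference, a minimal NumPy sketch of ZCA whitening as utils.PCA is
    # presumed to implement it (assumption: utils.PCA is project code not shown
    # here; D holds one flattened sample per column). Defined only, not called:
    def zca_whiten_sketch(D, eps=1e-5):
        X = D - D.mean(axis=1, keepdims=True)          # center each feature
        cov = X @ X.T / X.shape[1]                     # feature covariance
        U, S, _ = np.linalg.svd(cov)                   # eigenbasis of cov
        W = U @ np.diag(1.0 / np.sqrt(S + eps)) @ U.T  # ZCA: rotate back to input axes
        return W @ X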

    # Datasets
    train_set = utils.NumpyDataset(train_data_all, train_labels_all, transform=utils.NumpyToTensor())
    # Note: this script reuses the test split verbatim as the validation set
    val_set = utils.NumpyDataset(test_data, test_labels, transform=utils.NumpyToTensor())
    test_set = utils.NumpyDataset(test_data, test_labels, transform=utils.NumpyToTensor())
    # Data loaders
    train_loader = DataLoader(train_set, batch_size=batch_size, shuffle=True, num_workers=8)
    val_loader = DataLoader(val_set, batch_size=batch_size, shuffle=False, num_workers=8)
    test_loader = DataLoader(test_set, batch_size=batch_size, shuffle=False, num_workers=8)

    # Run the experiments
    for seed in seeds:
        logger.info('Train model with seed {}'.format(seed))
        # TensorboardX writer
        writer = SummaryWriter(main_directory + '/runs/' + experiment_name + '_' + str(seed))

        # Plot a resampled image to check
        if hexa:
Example #3
        logger.info('Compare indexed conv and nn.Conv2d on hexagonal images with WideNet')

        f = h5py.File(data_directory + '/aid' + str(size) + '_hexa.h5', 'r')  # TODO check the existence of data
        data = f['images'][()]
        labels = f['labels'][()]
        index_matrix = torch.tensor(f['index_matrix'][()])
        class_names = f.attrs['class_names']
        f.close()

        # Remap the flat hexagonal pixel vectors into 2-D "shifted" images:
        # index_matrix maps each (i, j) grid position to a pixel index, with
        # -1 marking positions outside the hexagonal grid.
        data_shifted = np.zeros(data.shape[0:2] + index_matrix.shape).astype(np.float32)
        for i in range(index_matrix.shape[0]):
            for j in range(index_matrix.shape[1]):
                if int(index_matrix[i, j]) != -1:
                    data_shifted[:, :, i, j] = data[:, :, int(index_matrix[i, j])]
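        # Cross-check: a vectorized form of the loop above (a sketch; assumes
        # index_matrix is still a 2-D tensor here, before the unsqueeze below):
        idx = index_matrix.numpy().astype(np.int64)
        valid = idx != -1
        data_shifted_vec = np.zeros_like(data_shifted)
        data_shifted_vec[:, :, valid] = data[:, :, idx[valid]]
        assert np.array_equal(data_shifted, data_shifted_vec)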

        sh_dataset = utils.NumpyDataset(data_shifted, labels, transform=utils.NumpyToTensor())
        hex_dataset = utils.NumpyDataset(data, labels, transform=utils.NumpyToTensor())
        sh_loader = DataLoader(sh_dataset, batch_size=batch_size, shuffle=False)
        hex_loader = DataLoader(hex_dataset, batch_size=batch_size, shuffle=False)

        logger.info('batch size: {} iterations: {}'.format(batch_size, len(hex_loader)))

        index_matrix = index_matrix.unsqueeze_(0).unsqueeze_(0)
        indexed_net = WideNetIndexConvIndexPool(index_matrix, 'Hex', 30).to(device)
        nn_net = WideNetMasked(30).to(device)

        # memory_cached() was renamed memory_reserved() in newer PyTorch releases
        ram_b = (torch.cuda.memory_allocated() + torch.cuda.memory_cached()) / 1024 / 1024
        logger.info('Memory allocated: {} MB'.format(ram_b))
        start_indexed = time.time()
        for d in hex_loader:
            out = indexed_net(d[0].to(device))
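        # Note (not in the original snippet): CUDA kernels launch asynchronously,
        # so an accurate wall-clock measurement of the loop above would call
        # torch.cuda.synchronize() before reading time.time() again.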