Example 1
 def test_layer_activation(self):
     network = create_dC2()
     network.loadWeights(
         'SparseConvNet/weights/ModelNet_10_repeat_bs100_nthrd10/ModelNet',
         200)
     # per-layer activations for two test meshes rendered at size 40
     lois = [
         network.layer_activations(
             Off3DPicture(
                 'SparseConvNet/Data/ModelNet/car/test/car_0216.off', 40)),
         network.layer_activations(
             Off3DPicture(
                 'SparseConvNet/Data/ModelNet/sink/test/sink_0133.off', 40))
     ]
     # the dC2 network exposes activations for 19 layers
     self.assertEqual(len(lois[0]), 19)
Example 2
def load_3d_off():
    path = "SparseConvNet/Data/ModelNet/airplane/train/airplane_0511.off"
    print("Creating Off3DPicture object")
    picture = Off3DPicture(path, 40)  # render size 40
    print("Codifying...")
    # voxelize the mesh on a grid of spatial size 126; returns
    # (key_id, feature_idx) pairs plus the feature values themselves
    pairs, features = picture.codifyInputData(126)
    print("done")
    return pairs
Example 3
 def voxelize(self, ss, rs):
     # ss: spatial size of the voxel grid, rs: render size
     pic = Off3DPicture(self.file_path, rs)
     pairs, _ = pic.codifyInputData(ss)
     list_of_coordinates = []
     for key_id, feature_idx in pairs:
         # decode the linear key into (x, y, z) grid coordinates
         # (integer division keeps this correct under Python 3)
         list_of_coordinates.append(
             ((key_id // ss // ss) % ss, (key_id // ss) % ss, key_id % ss))
     del pic
     return list(zip(*list_of_coordinates))
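
The `key_id` above is a linear index into an `ss × ss × ss` grid, so two integer divisions and three modulos recover `(x, y, z)`. A quick self-contained check of that decoding; the constants here are illustrative, not from the original snippet:

# Verify the base-ss decoding used in voxelize(): for ss = 4, the voxel
# (x, y, z) = (1, 2, 3) has linear key x*ss*ss + y*ss + z = 27.
ss = 4
key_id = 1 * ss * ss + 2 * ss + 3
assert ((key_id // ss // ss) % ss, (key_id // ss) % ss, key_id % ss) == (1, 2, 3)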
Example 4
 def test_predict(self):
     unlabeled_dataset = SparseDataset("One pic", 'UNLABELEDBATCH', 1, 1)
     network = create_dC2()
     num_of_inputs = 5
     nClasses = 40
     renderSize = 40
     test_file = ('SparseConvNet/Data/ModelNet/night_stand/'
                  'train/night_stand_0180.off')
     for i in range(num_of_inputs):
         unlabeled_dataset.add_picture(Off3DPicture(test_file, renderSize))
     matrix_of_preds = network.predict(unlabeled_dataset)
     self.assertEqual(matrix_of_preds.shape, (num_of_inputs, nClasses))
Example 5
import os


def generate_modelnet_dataset(full=False, limit=-1):
    number_of_features = 1
    renderSize = 40
    if full:
        data_folder = "SparseConvNet/Data/ModelNet/"
    else:
        data_folder = "SparseConvNet/Data/_ModelNet/"
    class_folders = os.listdir(data_folder)
    class_folders.sort()
    number_of_classes = len(class_folders)
    sparse_dataset = SparseDataset("ModelNet (Train subset)", 'TRAINBATCH',
                                   number_of_features, number_of_classes)
    for class_id, folder in enumerate(class_folders):
        dirpath = os.path.join(data_folder, folder, 'train')
        for _count, filename in enumerate(os.listdir(dirpath)):
            # a positive `limit` caps the number of files taken per class
            if 0 < limit <= _count:
                break
            sparse_dataset.add_picture(
                Off3DPicture(os.path.join(dirpath, filename),
                             renderSize,
                             label=class_id))
    # sparse_dataset.repeatSamples(10)
    return sparse_dataset
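
A usage sketch for the generator above; the reduced `_ModelNet` folder comes from the snippet itself, while the `limit` value is illustrative:

# Build a small training set: at most 20 meshes per class from the
# reduced _ModelNet folder.
ds = generate_modelnet_dataset(full=False, limit=20)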
Example 6
 def wrapped(pic_path):
     # closes over `network`, `renderSize` and `_layer` from the enclosing scope
     pic = Off3DPicture(pic_path, renderSize)
     loi = network.layer_activations(pic)
     return np.array(loi[_layer]['features'])
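
`wrapped` is a closure: `network`, `renderSize`, and `_layer` are free variables bound in an enclosing scope. A sketch of a factory that could produce such a closure; the factory, its defaults, and the commented call are assumptions, not part of the original:

import numpy as np

def make_feature_extractor(network, _layer, renderSize=40):
    # Bind the free variables and return the extractor closure.
    def wrapped(pic_path):
        pic = Off3DPicture(pic_path, renderSize)
        loi = network.layer_activations(pic)
        return np.array(loi[_layer]['features'])
    return wrapped

# extract = make_feature_extractor(create_dC2(), _layer=18)
# features = extract('SparseConvNet/Data/ModelNet/car/test/car_0216.off')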
Example 7
import os
from functools import partial
from itertools import islice

import numpy as np
from tqdm import tqdm

# get_functions, nop, weighted_sampling_of_best, SparseDataset and
# Off3DPicture are project-level helpers assumed to be in scope.


def train(ds,
          network,
          experiment_hash,
          batch_size=150,
          test_every_n_batches=100,
          unique_classes_in_batch=5,
          lr_policy=nop,
          momentum_policy=nop,
          pair_taking_method=0,
          render_size=40,
          weights_dir='./weights',
          in_batch_sample_selection=False,
          norm_type='cosine',
          L=1,
          epoch=0,
          epoch_limit=None,
          batch_iteration_hook=nop,
          epoch_iteration_hook=nop,
          pairs_limit=None):
    linear_triplet_loss, ltl_grad, norm = get_functions(norm_type=norm_type,
                                                        margin=L)
    ds.summary()
    gen = ds.generate_triplets(batch_size=batch_size,
                               unique_classes_in_batch=unique_classes_in_batch,
                               method=pair_taking_method,
                               limit=pairs_limit)

    weights_temp = os.path.join(weights_dir, experiment_hash)
    print('Taking {} batches into the dataset'.format(test_every_n_batches))
    if epoch_limit is None:
        if pairs_limit is None:
            total_pairs_num = sum(
                map(lambda l: len(l) * (len(l) - 1), ds.classes))
        else:
            total_pairs_num = ds.get_limit(pairs_limit) * ds.class_count
        total_number_of_epochs = int(
            np.ceil(total_pairs_num / (batch_size / 3.0) /
                    test_every_n_batches))
    else:
        total_number_of_epochs = epoch_limit
    for _ in tqdm(range(total_number_of_epochs),
                  total=total_number_of_epochs,
                  unit="epoch"):
        train_ds = SparseDataset(ds.name + " train",
                                 'TRAINBATCH',
                                 1,
                                 ds.class_count,
                                 shuffle=False)
        ranges_for_all = []
        for batch_samples, ranges in tqdm(islice(gen, test_every_n_batches),
                                          leave=False,
                                          desc="Creating dataset"):
            ranges_for_all.append(ranges)
            for _sample in batch_samples:
                train_ds.add_picture(Off3DPicture(_sample, render_size))
        if not ranges_for_all:
            break
        batch_gen = network.batch_generator(train_ds, batch_size)
        learning_rate = lr_policy(epoch)
        momentum = momentum_policy(epoch)
        for bid, (batch,
                  _ranges) in tqdm(enumerate(zip(batch_gen, ranges_for_all)),
                                   leave=False,
                                   unit='batch',
                                   total=test_every_n_batches):
            activation = network.processBatchForward(batch)
            feature_vectors = activation['features']
            delta = np.zeros_like(feature_vectors)
            batch_loss = []
            for _offset, _range in zip(3 * np.cumsum([0] + _ranges)[:-1],
                                       _ranges):
                if in_batch_sample_selection:
                    one_class_ids = np.arange(2 * _range) + _offset
                    other_class_ids = np.arange(2 * _range,
                                                3 * _range) + _offset
                    # Loop until every anchor/positive id is consumed.
                    # (.size rather than .any(): id 0 is a valid index
                    # and would make .any() stop one triplet early.)
                    while one_class_ids.size:
                        anchor = one_class_ids[0]
                        positive_id = weighted_sampling_of_best(
                            np.apply_along_axis(
                                partial(norm, feature_vectors[anchor]), 1,
                                feature_vectors[one_class_ids[1:]]),
                            best='max')
                        positive_id += 1
                        negative_id = weighted_sampling_of_best(
                            np.apply_along_axis(
                                partial(norm, feature_vectors[anchor]), 1,
                                feature_vectors[other_class_ids]),
                            best='min')
                        triplet_slice = [
                            anchor, one_class_ids[positive_id],
                            other_class_ids[negative_id]
                        ]
                        one_class_ids = np.delete(one_class_ids,
                                                  [0, positive_id])
                        other_class_ids = np.delete(other_class_ids,
                                                    negative_id)
                        delta[triplet_slice] = ltl_grad(
                            feature_vectors[triplet_slice])
                        batch_loss.append(
                            linear_triplet_loss(
                                feature_vectors[triplet_slice]))
                else:
                    for _i in range(_range):
                        triplet_slice = _offset + (np.arange(3) * _range) + _i
                        delta[triplet_slice] = ltl_grad(
                            feature_vectors[triplet_slice])
                        batch_loss.append(
                            linear_triplet_loss(
                                feature_vectors[triplet_slice]))
            batch_iteration_hook(batch_loss=batch_loss, epoch=epoch, bid=bid)
            network.processBatchBackward(batch,
                                         delta,
                                         learningRate=learning_rate,
                                         momentum=momentum)
        network.saveWeights(weights_temp, epoch)
        epoch_iteration_hook(_network=network,
                             learning_rate=learning_rate,
                             momentum=momentum,
                             epoch=epoch,
                             weights_path="{}_epoch-{}.cnn".format(
                                 weights_temp, epoch))
        epoch += 1
        del train_ds
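
In `train`, `lr_policy` and `momentum_policy` are called once per epoch with the epoch index and must return a scalar (the `nop` default suggests a trivial callable). A minimal pair of policies plus an illustrative call; all constants and argument values here are assumptions:

def step_lr(epoch, base=0.003, gamma=0.5, step=10):
    # Halve the learning rate every `step` epochs (illustrative constants).
    return base * gamma ** (epoch // step)

def constant_momentum(epoch):
    return 0.9

# train(ds=triplet_dataset,          # a dataset exposing generate_triplets()
#       network=create_dC2(),
#       experiment_hash='demo-run',
#       lr_policy=step_lr,
#       momentum_policy=constant_momentum)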