Example #1
    def setUp(self):

        #########
        # toy data

        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

        self.X = torch.tensor([[1, 2], [3, 4]],
                              dtype=torch.float,
                              device=device)
        self.Y = torch.tensor([0, 1], dtype=torch.int64, device=device)

        self.train_loader = torch.utils.data.DataLoader(
            torch.utils.data.TensorDataset(self.X, self.Y), batch_size=2)
        #########
        # base kn
        self.kn = greedyFeedforward()
        self.kn.add_layer(
            kFullyConnected(X=self.X,
                            n_out=2,
                            kernel='gaussian',
                            sigma=3,
                            bias=True))
        self.kn.add_layer(
            kFullyConnected(X=self.X,
                            n_out=2,
                            kernel='gaussian',
                            sigma=2,
                            bias=True))

        # manually set some weights
        self.kn.layer0.weight.data = torch.Tensor([[.1, .2], [.5, .7]])
        self.kn.layer0.bias.data = torch.Tensor([0., 0.])
        self.kn.layer1.weight.data = torch.Tensor([[1.2, .3], [.2, 1.7]])
        self.kn.layer1.bias.data = torch.Tensor([0.1, 0.2])

        self.kn.add_critic(self.kn.layer1.phi)
        self.kn.add_loss(torch.nn.CosineSimilarity())
        self.kn.add_metric(torch.nn.CosineSimilarity())
        self.kn.add_loss(torch.nn.CrossEntropyLoss(reduction='sum'))
        self.kn.add_metric(torch.nn.CrossEntropyLoss(reduction='sum'))

        #########
        # ensemble

        self.kn_ensemble = greedyFeedforward()
        self.kn_ensemble.add_layer(K.to_ensemble(self.kn.layer0, batch_size=1))
        self.kn_ensemble.add_layer(K.to_ensemble(self.kn.layer1, batch_size=1))
        self.kn_ensemble.add_critic(self.kn.layer1.phi)
        self.kn_ensemble.add_loss(torch.nn.CosineSimilarity())
        self.kn_ensemble.add_metric(torch.nn.CosineSimilarity())
        self.kn_ensemble.add_loss(torch.nn.CrossEntropyLoss(reduction='sum'))
        self.kn_ensemble.add_metric(torch.nn.CrossEntropyLoss(reduction='sum'))

        self.kn.to(device)
        self.kn_ensemble.to(device)
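
The fixture above builds the same two-layer kernel network twice: once from plain kFullyConnected layers (self.kn) and once from their ensemble counterparts produced by K.to_ensemble (self.kn_ensemble), with identical weights. A test built on this fixture would typically assert that both forms agree numerically. A minimal sketch of such a check, assuming greedyFeedforward behaves like a standard torch.nn.Module whose forward pass accepts a batch of inputs (the method name below is illustrative, not part of the original suite):

    def test_ensemble_matches_base(self):
        # forward the toy batch through both views of the network
        with torch.no_grad():
            out_base = self.kn(self.X)
            out_ensemble = self.kn_ensemble(self.X)
        # the ensemble was built from the same weights, so outputs should match
        self.assertTrue(torch.allclose(out_base, out_ensemble, atol=1e-6))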
Example #2
    def to_ensemble_(self, batch_size):
        """
        Convert layers in the stack into equivalent ensemble layers.
        Note that this is an in-place function.

        Parameters
        ----------
        batch_size : int
            Size of each component layer in the ensemble. The same batch_size
            is used for all layers in the stack.
        """
        for i in range(self._comp_counter):
            component = getattr(self, 'comp'+str(i))
            setattr(self, 'comp'+str(i), K.to_ensemble(component, batch_size))
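
Because to_ensemble_ rewrites every compN attribute in place, a typical call converts an already assembled stack just before batched evaluation. A minimal usage sketch, assuming model is an instance of the stack class this method belongs to (the name model is illustrative):

    # convert every component layer of the stack into an ensemble layer;
    # nothing is returned, the stack itself is modified
    model.to_ensemble_(batch_size=1)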
Example #3
    def to_ensemble(self, batch_size):
        return K.to_ensemble(self, batch_size)
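
Unlike to_ensemble_ above, this variant is not in place: it simply returns the result of K.to_ensemble for the layer, and the caller decides where to keep the converted copy. Assuming, as the usage in Example #1 suggests, that K.to_ensemble builds a new ensemble layer rather than mutating its argument, a call would look like (illustrative names):

    ensemble_layer = layer.to_ensemble(batch_size=1)  # `layer` itself is left unchanged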