Example #1
File: svb.py Project: meobet/vne
    def top_n(self, x, y, n):
        # Binarize the zero-padded index rows into a multi-hot input matrix.
        inputs = to_binary(torch.from_numpy(x).long(),
                           (x.shape[0], self.input_dim),
                           use_cuda=self.use_cuda)
        inputs = variable(self, inputs)
        # Score every item and return the n highest-scoring indices per row,
        # best first.
        return numpy(self, self.predict_from_posterior(
            inputs, z_mean=False)).argsort(axis=1)[:, -1:-n - 1:-1]
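The helpers to_binary, variable, and numpy are vne project utilities that the snippet does not define. A minimal sketch of what they plausibly do, written for the old torch.autograd.Variable API these examples use; the padding convention (index 0 is ignored) is an assumption taken from the t > 0 filtering in Example #2:

import torch
from torch.autograd import Variable

def to_binary(indices, shape, use_cuda=False):
    # Hypothetical sketch: scatter a batch of zero-padded index rows into a
    # multi-hot float matrix of the given (batch, input_dim) shape.
    out = torch.zeros(*shape)
    out.scatter_(1, indices, 1.0)
    out[:, 0] = 0.0  # assumed: index 0 is padding, not a real item
    if use_cuda:
        out = out.cuda()
    return out

def variable(model, tensors):
    # Wrap tensors in autograd Variables, moving them to the GPU when the
    # model runs on CUDA.
    if isinstance(tensors, tuple):
        return tuple(variable(model, t) for t in tensors)
    if model.use_cuda:
        tensors = tensors.cuda()
    return Variable(tensors)

def numpy(model, tensor):
    # Bring a Variable (or tensor) back to the CPU as a NumPy array.
    data = tensor.data if hasattr(tensor, "data") else tensor
    return data.cpu().numpy()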
Example #2
File: svb.py Project: meobet/vne
    def rank(self, x, y):
        inputs = to_binary(torch.from_numpy(x).long(),
                           (x.shape[0], self.input_dim),
                           use_cuda=self.use_cuda)
        inputs = variable(self, inputs)
        # Keep only the non-padding (positive) item indices of each row.
        candidates = [t[t > 0].astype(int) for t in x]
        # Pair each candidate with its predicted score, sorted ascending.
        outputs = [
            sorted(zip(row, scores[row]), key=lambda t: t[1])
            for row, scores in zip(candidates, numpy(self, self.predict(inputs)))
        ]
        # Reverse to best-first order and drop the scores.
        return [[item for item, _ in row[::-1]] for row in outputs]
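To see what the comprehension computes, here is the same ranking logic run standalone on NumPy arrays, with made-up scores standing in for predict():

import numpy as np

scores = np.array([[0.1, 0.9, 0.3, 0.7],
                   [0.6, 0.2, 0.8, 0.4]])  # stand-in for predict() output
x = np.array([[1, 3, 0, 0],
              [2, 0, 0, 0]])               # zero-padded candidate indices

candidates = [t[t > 0].astype(int) for t in x]
outputs = [sorted(zip(row, s[row]), key=lambda t: t[1])
           for row, s in zip(candidates, scores)]
print([[item for item, _ in row[::-1]] for row in outputs])  # [[1, 3], [2]]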
Example #3
File: svb.py Project: meobet/vne
    def fit_direct(self, dataset, batch_size, num_epochs=1, verbose=0):
        self.train(True)
        if self.optimizer is None:
            self.build_optimizer()

        fit_loss = []
        for epoch in range(num_epochs):
            self.lr = self.lr_scheduler(self.optimizer, epoch)
            epoch_loss = []
            timer = time.time()

            # Iterate over data.
            for x, y in dataset.batches(batch_size):
                # get the inputs
                inputs = to_binary(torch.from_numpy(x).long(),
                                   (x.shape[0], self.input_dim),
                                   use_cuda=self.use_cuda)
                inputs = variable(self, inputs)
                labels = variable(self, torch.from_numpy(y).long())

                # zero the parameter gradients
                self.optimizer.zero_grad()
                # Variational objective: reconstruction loss plus KL divergence.
                label_loss, KLD = self.loss(inputs)
                loss = label_loss + KLD
                loss.backward()
                self.optimizer.step()

                # statistics
                if verbose > 1:
                    print("Batch", len(epoch_loss), "loss:",
                          numpy(self, loss), "=", numpy(self, label_loss), "+",
                          numpy(self, KLD), "average time:",
                          (time.time() - timer) / float(len(epoch_loss) + 1))
                epoch_loss.append((numpy(self, label_loss)[0],
                                   numpy(self, KLD)[0]))
            if verbose > 0:
                print("loss =", np.mean(epoch_loss, axis=0), "time =",
                      time.time() - timer)
            fit_loss.append(np.mean(epoch_loss))
        return fit_loss
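fit_direct delegates the learning-rate schedule to self.lr_scheduler(optimizer, epoch), which is not shown in the snippet. A minimal sketch of a compatible scheduler, assuming simple exponential decay (the actual vne schedule may differ):

def exp_decay_scheduler(optimizer, epoch, base_lr=1e-3, decay=0.95):
    # Hypothetical scheduler with the call signature fit_direct expects:
    # set this epoch's learning rate on every parameter group and return it.
    lr = base_lr * decay ** epoch
    for group in optimizer.param_groups:
        group["lr"] = lr
    return lr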
Example #4
    def rank(self, dataset, batch_size, num_batches=None, verbose=0):
        self.train(False)
        timer = time.time()
        result = []
        batch_count = 0.
        # Iterate over data.
        for x, y in dataset.batches(batch_size=batch_size):
            # get the inputs
            inputs, labels = variable(
                self, (torch.from_numpy(x).long(), torch.from_numpy(y).long()))
            batch_count += 1
            if num_batches is not None and batch_count > num_batches:
                break
            if verbose > 0:
                print("Batch", batch_count, "average time:",
                      (time.time() - timer) / batch_count)
            # Keep only the non-padding (positive) indices of each input row.
            candidates = [row[row > 0] for row in numpy(self, inputs)]
            outputs = [
                sorted(zip(row, scores[row]), key=lambda t: t[1])
                for row, scores in zip(candidates, numpy(self, self.predict(inputs)))
            ]
            result.extend([[item for item, _ in row[::-1]] for row in outputs])

        return result
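Both batch-wise evaluation methods only require the dataset to expose batches(batch_size) yielding (x, y) NumPy pairs. A minimal in-memory stand-in under that assumption:

import numpy as np

class ArrayDataset(object):
    # Hypothetical stand-in for the dataset interface used above.
    def __init__(self, x, y):
        self.x, self.y = np.asarray(x), np.asarray(y)

    def batches(self, batch_size):
        # Yield consecutive (x, y) slices of at most batch_size rows.
        for start in range(0, len(self.x), batch_size):
            yield (self.x[start:start + batch_size],
                   self.y[start:start + batch_size])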
Example #5
    def top_n(self, dataset, n, batch_size, num_batches=None, verbose=0):
        self.train(False)
        timer = time.time()
        result = []
        batch_count = 0.
        # Iterate over data.
        for x, y in dataset.batches(batch_size=batch_size):
            # get the inputs
            inputs, labels = variable(
                self, (torch.from_numpy(x).long(), torch.from_numpy(y).long()))
            batch_count += 1
            if num_batches is not None and batch_count > num_batches:
                break
            if verbose > 0:
                print("Batch", batch_count, "average time:",
                      (time.time() - timer) / batch_count)
            # Top-n item indices per row, best first; .copy() detaches the
            # reversed slice from the full score matrix before stacking.
            result.append(numpy(self, self.predict(inputs))
                          .argsort(axis=1)[:, -1:-n - 1:-1].copy())

        return np.vstack(result)
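The slice [:, -1:-n - 1:-1] reads the n largest columns of the ascending argsort in descending order. A quick standalone check:

import numpy as np

scores = np.array([[0.1, 0.9, 0.3, 0.7]])
n = 2
print(scores.argsort(axis=1)[:, -1:-n - 1:-1])  # [[1 3]]: top-2 indices, best first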
Example #6
File: svb.py Project: meobet/vne
    def sample_from_latent(self, num_samples, top_n):
        self.train(False)
        # Draw latent vectors from the standard-normal prior.
        latents = variable(self,
                           torch.randn(num_samples, self.num_latent_factors))
        # Decode each sample into item scores; return the top_n item indices
        # per sample (best first) and the L2 norm of each latent vector.
        return (numpy(self, self.decode(latents))
                .argsort(axis=1)[:, -1:-top_n - 1:-1].copy(),
                np.linalg.norm(numpy(self, latents), axis=1))
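A standalone rehearsal of the same computation in NumPy, with a random linear map standing in for the model's trained decode() network (names and sizes here are illustrative, not from the vne API):

import numpy as np

num_latent_factors, num_items, num_samples, top_n = 8, 20, 4, 3
decoder = np.random.randn(num_latent_factors, num_items)  # stub for decode()

latents = np.random.randn(num_samples, num_latent_factors)
scores = latents.dot(decoder)
items = scores.argsort(axis=1)[:, -1:-top_n - 1:-1].copy()
norms = np.linalg.norm(latents, axis=1)
print(items.shape, norms.shape)  # (4, 3) (4,)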