Example no. 1
    def validation_epoch_end(self, outputs):
        metrics = [
            'val_loss', 'val/recon_loss', 'val/kl_div_z', 'val/kl_div_b',
            'val_rec_err'
        ]
        tensorboard_logs = {}
        for m in metrics:
            # Average each logged metric over the validation batches.
            values = [out['log'][m] for out in outputs]
            avg = sum(values) / len(values)
            self.logger.experiment.add_scalar(m, avg, self.global_step)
            self.log(m, avg)
            tensorboard_logs[m] = avg

        # When ground-truth eigenvectors/eigenvalues are available, also
        # report the subspace-recovery metrics.
        if (self.gt_eigvectors is not None) and (self.gt_eigs is not None):
            ms = metric_subspace(self.vae, self.gt_eigvectors, self.gt_eigs)
            ma = metric_alignment(self.vae, self.gt_eigvectors)
            mp = metric_procrustes(self.vae, self.gt_eigvectors)
            mr = metric_pairwise(self.vae, self.gt_eigvectors, self.gt_eigs)
            tlog = {'subspace_distance': ms, 'alignment': ma, 'procrustes': mp}
            self.logger.experiment.add_scalar('procrustes', mp,
                                              self.global_step)
            self.logger.experiment.add_scalar('pairwise_r', mr,
                                              self.global_step)
            self.logger.experiment.add_scalar('subspace_distance', ms,
                                              self.global_step)
            self.logger.experiment.add_scalar('alignment', ma,
                                              self.global_step)
            tensorboard_logs = {**tensorboard_logs, **tlog}

        return {
            'val_loss': tensorboard_logs['val_loss'],
            'log': tensorboard_logs
        }
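
The per-metric averaging above uses plain Python sum/len over the batch outputs; since the logged values are scalar tensors, the same aggregation can be written with torch.stack. A minimal helper sketch, assuming each element of outputs carries its scalars under a 'log' dict exactly as in the example (average_logged_metric is a hypothetical name):

import torch

def average_logged_metric(outputs, key):
    # Stack the per-batch scalar tensors logged under `key` and average them.
    # `outputs` is the list handed to validation_epoch_end; each element is
    # assumed to hold a 'log' dict of 0-d tensors.
    return torch.stack([out['log'][key] for out in outputs]).mean()

Inside the hook this would be used as, e.g., rec_err = average_logged_metric(outputs, 'val_rec_err').
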
Example no. 2
    def validation_epoch_end(self, outputs):
        # Average the per-batch reconstruction error and total loss.
        rec_errs = [out['log']['val_rec_err'] for out in outputs]
        rec_err = sum(rec_errs) / len(rec_errs)
        self.logger.experiment.add_scalar('val_rec_err', rec_err,
                                          self.global_step)

        losses = [out['log']['val_loss'] for out in outputs]
        loss = sum(losses) / len(losses)
        self.logger.experiment.add_scalar('val_loss', loss, self.global_step)
        self.log('val_loss', loss)

        # Disabled because metric_orthogonality is too slow to run every epoch:
        # ortho, eig_err = metric_orthogonality(self.vae)
        # self.logger.experiment.add_scalar('orthogonality',
        #                                   ortho, self.global_step)
        # tensorboard_logs = dict(
        #     [('val_loss', loss),
        #      ('orthogonality', ortho),
        #      ('eigenvalue-error', eig_err)]
        # )
        tensorboard_logs = {'val_loss': loss, 'val_rec_err': rec_err}

        if (self.gt_eigvectors is not None) and (self.gt_eigs is not None):
            ms = metric_subspace(self.vae, self.gt_eigvectors, self.gt_eigs)
            ma = metric_alignment(self.vae, self.gt_eigvectors)
            mp = metric_procrustes(self.vae, self.gt_eigvectors)
            mr = metric_pairwise(self.vae, self.gt_eigvectors, self.gt_eigs)
            tlog = {'subspace_distance': ms, 'alignment': ma, 'procrustes': mp}
            self.logger.experiment.add_scalar('procrustes', mp,
                                              self.global_step)
            self.logger.experiment.add_scalar('pairwise_r', mr,
                                              self.global_step)
            self.logger.experiment.add_scalar('subspace_distance', ms,
                                              self.global_step)
            self.logger.experiment.add_scalar('alignment', ma,
                                              self.global_step)
            tensorboard_logs = {**tensorboard_logs, **tlog}

        return {'val_loss': loss, 'log': tensorboard_logs}
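
Example no. 2 keeps metric_orthogonality disabled for speed, and its definition is not shown on this page. Purely as a hypothetical illustration of the kind of check such a metric could perform on a decoder weight matrix (not the project's actual implementation):

import torch

def orthogonality_gap(weight: torch.Tensor) -> torch.Tensor:
    # Frobenius norm of W^T W - I, which is zero exactly when the columns
    # of W are orthonormal. Illustrative only; not metric_orthogonality.
    k = weight.shape[1]
    gram = weight.T @ weight
    return torch.linalg.norm(gram - torch.eye(k, device=weight.device))

A smaller gap means the learned directions are closer to an orthonormal set, which is the property the commented-out metric appears to track.
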
Example no. 3
    def validation_epoch_end(self, outputs):
        # Average the per-batch reconstruction error.
        rec_errs = [out['log']['val_rec_err'] for out in outputs]
        rec_err = sum(rec_errs) / len(rec_errs)
        self.logger.experiment.add_scalar('val_rec_err', rec_err,
                                          self.global_step)

        # Only evaluated for a single-layer encoder.
        if self.hparams.encoder_depth == 1:
            mt = metric_transpose_theorem(self.model)
            self.logger.experiment.add_scalar('transpose', mt,
                                              self.global_step)

        ortho, eig_err = metric_orthogonality(self.model)
        self.logger.experiment.add_scalar('orthogonality', ortho,
                                          self.global_step)

        tensorboard_logs = {
            'val_loss': rec_err,
            # 'transpose': mt,
            'orthogonality': ortho,
            'eigenvalue-error': eig_err,
        }

        if (self.gt_eigvectors is not None) and (self.gt_eigs is not None):
            ms = metric_subspace(self.model, self.gt_eigvectors, self.gt_eigs)
            ma = metric_alignment(self.model, self.gt_eigvectors)
            mp = metric_procrustes(self.model, self.gt_eigvectors)
            mr = metric_pairwise(self.model, self.gt_eigvectors, self.gt_eigs)
            tlog = {'subspace_distance': ms, 'alignment': ma, 'procrustes': mp}
            self.logger.experiment.add_scalar('procrustes', mp,
                                              self.global_step)
            self.logger.experiment.add_scalar('pairwise_r', mr,
                                              self.global_step)
            self.logger.experiment.add_scalar('subspace_distance', ms,
                                              self.global_step)
            self.logger.experiment.add_scalar('alignment', ma,
                                              self.global_step)
            tensorboard_logs = {**tensorboard_logs, **tlog}

        return {'val_loss': rec_err, 'log': tensorboard_logs}
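
All three examples target the pre-2.0 PyTorch Lightning API, in which validation_epoch_end receives the list of validation_step outputs. Lightning 2.x removed this hook; the usual replacement is to buffer per-batch values on the module and aggregate them in on_validation_epoch_end. A minimal sketch under that assumption, with a hypothetical TinyAE module standing in for the models above:

import torch
import pytorch_lightning as pl


class TinyAE(pl.LightningModule):
    # Hypothetical linear autoencoder illustrating the Lightning >= 2.0
    # pattern: buffer per-batch metrics yourself, then reduce and log them
    # in on_validation_epoch_end.
    def __init__(self, dim: int = 8, k: int = 2):
        super().__init__()
        self.encoder = torch.nn.Linear(dim, k, bias=False)
        self.decoder = torch.nn.Linear(k, dim, bias=False)
        self._val_rec_errs = []

    def validation_step(self, batch, batch_idx):
        x = batch[0] if isinstance(batch, (list, tuple)) else batch
        rec_err = torch.nn.functional.mse_loss(
            self.decoder(self.encoder(x)), x)
        self._val_rec_errs.append(rec_err)

    def on_validation_epoch_end(self):
        rec_err = torch.stack(self._val_rec_errs).mean()
        self.log('val_rec_err', rec_err)
        self._val_rec_errs.clear()

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=1e-3)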