def report_val(self, loss):
    """Log the current validation loss (with the best loss so far) and write it to TensorBoard."""
    # assumes evaluate_loss_name() is a pure name accessor — TODO confirm
    metric = self.criterion.evaluate_loss_name()
    logging.info(
        "[val] [it: {}] [{}: {}] [previous best {}: {}]".format(
            self.it, metric, loss, metric, self.best_val_loss))
    writer.add_scalar("val_{}".format(metric), loss, global_step=self.it)
def report_test(self, loss):
    """Log the current test loss and write it to TensorBoard."""
    metric = self.criterion.evaluate_loss_name()
    logging.info("[test] [it: {}] [{}: {}]".format(self.it, metric, loss))
    writer.add_scalar("test_{}".format(metric), loss, global_step=self.it)
def log_likelihood(self, it, **kwargs):
    """Evaluate the model's log-likelihood on the test set and log/record it.

    When the "others.ais" config flag is set, the model's log-partition
    estimate is refreshed first and AIS-based likelihood is used.
    """
    use_ais = config.get("others", "ais", default=False)
    if use_ais:
        # AIS evaluation needs an up-to-date partition-function estimate.
        self.model.update_log_partition()
    ll = evaluate(
        self.te,
        self.runner,
        lambda v: self.model.log_likelihood(v, ais=use_ais),
        batch_size=min(10000, len(self.te)))
    logging.info("[log_likelihood] [it: {}] [log_likelihood: {}]".format(it, ll))
    writer.add_scalar("log_likelihood", ll, global_step=it)
def report_train(self, loss, **kwargs):
    """Log the training loss and write it — plus any extra named scalars — to TensorBoard."""
    metric = self.criterion.train_loss_name()
    logging.info("[train] [it: {}] [{}: {}]".format(self.it, metric, loss))
    writer.add_scalar("train_{}".format(metric), loss, global_step=self.it)
    # Any additional keyword arguments are treated as extra scalar metrics.
    for tag, value in kwargs.items():
        writer.add_scalar("{}".format(tag), value, global_step=self.it)
def classify(self, it, **kwargs):
    """Fit simple classifiers on extracted features and log their test accuracy.

    The "classifiers" kwarg may be a list of classifier names, a single
    name string, or the string "all" (expands to a fixed set of four).
    Defaults to "lsvm".
    """
    tr_features, tr_ys = extract_labelled_feature(self.q, self.labelled_tr)
    te_features, te_ys = extract_labelled_feature(self.q, self.labelled_te)
    chosen = kwargs.get("classifiers", "lsvm")
    if not isinstance(chosen, list):
        assert isinstance(chosen, str)
        chosen = ["kn", "svm", "lsvm", "logistic"] if chosen == "all" else [chosen]
    for clf_name in chosen:
        # classify_features returns a sequence; the last entry is test accuracy
        # — presumably (train_acc, ..., test_acc); verify against its definition.
        acc = classify_features(tr_features, tr_ys, te_features, te_ys, clf_name)[-1]
        logging.info("[{} classify] [it: {}] [acc: {}]".format(clf_name, it, acc))
        writer.add_scalar("{}_classify".format(clf_name), acc, global_step=it)
def fisher(self, it, **kwargs):
    """Evaluate the SSM (sliced score matching) criterion on the test set and log it."""
    score = evaluate(
        self.te,
        self.runner,
        lambda v: criterions.ssm(v, self.model),
        batch_size=min(10000, len(self.te)))
    logging.info("[fisher] [it: {}] [fisher: {}]".format(it, score))
    writer.add_scalar("fisher", score, global_step=it)