def _loss(self, y_true, y_pred):
    """Keras training loss: negative log-likelihood plus a gauge-fixing penalty.

    We assume a model of the form P(x) = exp(-E(x)) P_0(x) / Z and minimize
    the negative log-likelihood <-logP> = log(Z) - <-E>.  Normalization of P
    gives Z = <exp(-E)>_{P_0}.  The gauge is fixed by adding the constraint
    (Z - 1)**2 to the likelihood, weighted by self.gamma.
    """
    # Mean energy over the data samples (labelled y_true == 0).
    data_mask = 1. - y_true
    avg_energy = ksum((-y_pred) * data_mask) / ksum(data_mask)
    # log(Z) estimated over the generated/background samples (y_true == 1).
    log_z = klog(ksum(kexp(-y_pred) * y_true)) - klog(ksum(y_true))
    # Gauge constraint: penalize deviation of Z = exp(log_z) from 1.
    gauge = kexp(log_z) - 1.
    return log_z - avg_energy + self.gamma * gauge * gauge
def _likelihood(self, y_true, y_pred):
    """Keras training loss: plain negative log-likelihood (no gauge term).

    We assume a model of the form P(x) = exp(-E(x)) P_0(x) / Z and minimize
    the negative log-likelihood <-logP> = log(Z) - <-E>.  Normalization of P
    gives Z = <exp(-E)>_{P_0}.
    """
    # Mean energy over the data samples (labelled y_true == 0).
    background = 1. - y_true
    avg_energy = ksum(background * (-y_pred)) / ksum(background)
    # log(Z) estimated over the generated/background samples (y_true == 1).
    log_z = klog(ksum(y_true * kexp(-y_pred))) - klog(ksum(y_true))
    return log_z - avg_energy