Example #1
    def grad_check(self, x, y, outfd=sys.stderr, **kwargs):
        # Run backprop-through-time over the whole sequence so the analytic
        # gradient matches the full-sequence loss that the check evaluates.
        bptt_old = self.bptt
        self.bptt = len(y)
        print("NOTE: temporarily setting self.bptt = len(y) = %d to compute true gradient." % self.bptt, file=outfd)
        NNBase.grad_check(self, x, y, outfd=outfd, **kwargs)
        self.bptt = bptt_old
        print("Reset self.bptt = %d" % self.bptt, file=outfd)
Example #2
    def grad_check(self, x, y, outfd=sys.stderr, **kwargs):
        """
        Wrapper for gradient check on RNNs;
        ensures that backprop-through-time is run to completion,
        computing the full gradient for the loss as summed over
        the input sequence and predictions.

        Do not modify this function!
        """
        NNBase.grad_check(self, x, y, outfd=outfd, **kwargs)
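The "full gradient for the loss as summed over the input sequence" in the docstring is, for sequence models of this kind, typically the cross-entropy accumulated over every time step, J(theta) = -sum_{t=1..T} log P(y_t | x_1, ..., x_t); checking against anything less than this full sum would not exercise the complete backward pass.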
Example #3
    def grad_check(self, x, y, outfd=sys.stderr, **kwargs):
        """
        Wrapper for gradient check on RNNs;
        ensures that backprop-through-time is run to completion,
        computing the full gradient for the loss as summed over
        the input sequence and predictions.

        Do not modify this function!
        """
        bptt_old = self.bptt
        self.bptt = len(y)
        print("NOTE: temporarily setting self.bptt = len(y) = %d to compute true gradient." % self.bptt, file=outfd)
        NNBase.grad_check(self, x, y, outfd=outfd, **kwargs)
        self.bptt = bptt_old
        print("Reset self.bptt = %d" % self.bptt, file=outfd)
Example #4
    def grad_check(self, X, y, outfd=sys.stderr, **kwargs):
        """
        Wrapper for gradient check on RNNs;
        ensures that backprop-through-time is run to completion,
        computing the full gradient for the loss as summed over
        the input sequence and predictions.

        Do not modify this function!
        """
        # if the model is not recurrent, this bptt bookkeeping has no effect
        bptt_old = self.bptt
        # wrap a single example so the loop below sees a batch of one
        if isinstance(X, ndarray):
            X, y = [X], [y]

        for i in range(len(X)):
            self.bptt = X[i].shape[0]
            print("NOTE: temporarily setting self.bptt = len(X[i]) = %d to compute true gradient." % self.bptt, file=outfd)
            NNBase.grad_check(self, X[i], y[i], outfd=outfd, **kwargs)

        self.bptt = bptt_old
        print("Reset self.bptt = %d" % self.bptt, file=outfd)