# Assumed imports (not shown in the original listing): numpy for the CPU layers
# and a GPU array module for the sigmoid layer; AbstractLayer, InputPort and
# OutputPort are expected to come from the same framework, whose module path is
# not shown here.
import numpy as np
import gnumpy as gpu   # assumption: any module providing an elementwise exp would do

class SigmoidLayer(AbstractLayer):
   """
   A layer which implements sigmoid activation
   """

   def __init__(self, layerSize):
      """
      A sigmoid layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A sigmoid layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using the logistic function
      """

      # Perform the activation (logistic function)
      self.output.setOutput(1.0 / (1.0 + gpu.exp(-self.input.getNetInput())))


   def backward(self):
      """
      Perform a backprop step - the gradient is the sigmoid derivative, s * (1 - s)
      """
            
      self.input.setDelta(self.output.getOutput() * (1.0 - self.output.getOutput()) * self.output.getNetDelta())
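The backward step above relies on the identity that the derivative of the logistic
function is s * (1 - s). A minimal standalone check of that identity with plain
numpy, independent of the port/layer framework these classes assume:

import numpy as np

x = np.linspace(-5.0, 5.0, 11)
s = 1.0 / (1.0 + np.exp(-x))

# central finite difference of the logistic function
eps = 1e-6
numeric = ((1.0 / (1.0 + np.exp(-(x + eps)))) -
           (1.0 / (1.0 + np.exp(-(x - eps))))) / (2.0 * eps)

assert np.allclose(s * (1.0 - s), numeric, atol=1e-6)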
class AbsoluteValueLayer(AbstractLayer):
   """
   A layer which implements absolute value activation
   """

   def __init__(self, layerSize):
      """
      An absolute value layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # An absolute value layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using absolute value
      """

      # Perform the activation (absolute value)
      self.output.setOutput(np.abs(self.input.getNetInput()))


   def backward(self):
      """
      Perform a backprop step - gradient is the sign of the output
      """
            
      self.input.setDelta(np.sign(self.output.getOutput()) * self.output.getNetDelta())
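The gradient of the absolute value is the sign of the net input (with subgradient 0
at zero); taking the sign of the non-negative output would give 1 almost everywhere.
A plain-numpy finite-difference check of the derivative, evaluated away from zero:

import numpy as np

x = np.array([-3.0, -0.5, 0.5, 3.0])

# central finite difference of |x|
eps = 1e-6
numeric = (np.abs(x + eps) - np.abs(x - eps)) / (2.0 * eps)

assert np.allclose(np.sign(x), numeric, atol=1e-6)     # [-1, -1, 1, 1]
assert not np.allclose(np.sign(np.abs(x)), numeric)    # sign of the output is all ones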
class ReluLayer(AbstractLayer):
   """
   A layer which implements rectified linear activation
   """

   def __init__(self, layerSize):
      """
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A ReLU layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using the ReLU function, max(0, x)
      """

      # Perform the activation (set any negative values to zero)
      self.output.setOutput(np.fmax(0.0, self.input.getNetInput()))


   def backward(self):
      """
      Perform a backprop step - the gradient is 1 where the output is positive and 0 elsewhere
      """
            
      self.input.setDelta(np.where(self.output.getOutput() > 0, 1.0, 0.0) * self.output.getNetDelta())
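A tiny worked example of the ReLU forward and backward arithmetic on plain numpy
arrays, outside the port/layer framework (the input and delta values are made up
purely for illustration):

import numpy as np

net_input = np.array([[-1.5, 0.2, 3.0]])
net_delta = np.array([[0.4, 0.4, 0.4]])             # hypothetical delta arriving from above

output = np.fmax(0.0, net_input)                    # [[0.0, 0.2, 3.0]]
delta = np.where(output > 0, 1.0, 0.0) * net_delta  # [[0.0, 0.4, 0.4]]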
class SoftmaxLayer(AbstractLayer):
   """
   A layer which implements softmax activation
   """

   def __init__(self, layerSize):
      """
      A softmax layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A softmax layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using the softmax function
      """

      # Perform the activation: clip the net input to avoid overflow in the
      # exponential, exponentiate (adding a small constant for numerical
      # safety), then normalize each row so that it sums to one
      net_input = self.input.getNetInput()
      net_input[net_input > 10.0] = 10.0
      net_input[net_input < -10.0] = -10.0

      self.output.setOutput(np.exp(net_input) + 1e-10)
      self.output.setOutput(self.output.getOutput() /
                            np.array([np.sum(self.output.getOutput(), 1)]).transpose())


   def backward(self):
      """
      Perform a backprop step - the delta is passed through unchanged; the softmax
      derivative is assumed to be handled by the cost function (e.g. cross-entropy)
      """
            
#      self.input.setDelta(self.output.getOutput() * (1.0 - self.output.getOutput()) * self.output.getNetDelta())
      self.input.setDelta(self.output.getNetDelta())
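Passing the delta straight through is the usual arrangement when the softmax output
feeds a cross-entropy cost: the combined gradient with respect to the net input is
simply output - target, so the cost layer can hand that difference down as the delta
and the softmax Jacobian never needs to be applied here. A standalone finite-difference
check of that fact with plain numpy (using max-subtraction rather than the clipping
above for numerical stability):

import numpy as np

def softmax(z):
    e = np.exp(z - z.max(axis=1, keepdims=True))
    return e / e.sum(axis=1, keepdims=True)

z = np.array([[0.5, -1.0, 2.0]])           # made-up logits
t = np.array([[0.0, 0.0, 1.0]])            # one-hot target

def cross_entropy(z):
    return -np.sum(t * np.log(softmax(z)))

# numerical gradient of the cost with respect to the net input
eps = 1e-6
numeric = np.zeros_like(z)
for i in range(z.shape[1]):
    zp, zm = z.copy(), z.copy()
    zp[0, i] += eps
    zm[0, i] -= eps
    numeric[0, i] = (cross_entropy(zp) - cross_entropy(zm)) / (2.0 * eps)

assert np.allclose(softmax(z) - t, numeric, atol=1e-5)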