class AbsoluteValueLayer(AbstractLayer):
    """
   A layer which implements absolute value activation
   """
    def __init__(self, layerSize):
        """
      An absolute value layer can be connected to several inputs
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # An absolute value layer has an input port and an output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

    def forward(self):
        """
      Perform a forward step - activate the net input using absolute value
      """

        # Perform the activation (absolute value)
        self.output.setOutput(np.abs(self.input.getNetInput()))

    def backward(self):
        """
      Perform a backprop step - gradient is the sign of the output
      """

        self.input.setDelta(
            np.sign(self.output.getOutput()) * self.output.getNetDelta())
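
As a standalone sanity check of the math above (plain NumPy only, with a made-up upstream delta standing in for the port plumbing):

import numpy as np

x = np.array([-2.0, 0.0, 3.0])     # net input
y = np.abs(x)                      # forward: |x|
upstream = np.ones_like(x)         # hypothetical delta arriving from the layer above
delta = np.sign(x) * upstream      # backward: d|x|/dx = sign(x), 0 at x == 0
print(y)       # [2. 0. 3.]
print(delta)   # [-1.  0.  1.]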
Example #2
class TanhLayer(AbstractLayer):
    """
   A layer which implements sigmoid activation
   """
    def __init__(self, layerSize):
        """
      A sigmoid layer can be connected to several inputs
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A tanh layer has an input port and an output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

    def forward(self):
        """
      Perform a forward step - activate the net input using logistic function
      """

        # Perform the activation (logistic function)
        self.output.setOutput((1.0 - np.exp(-self.input.getNetInput())) /
                              (1.0 + np.exp(-self.input.getNetInput())))

    def backward(self):
        """
      Perform a backprop step - gradient is the derivative of the sigmoid functon
      """

        self.input.setDelta(
            (1.0 - self.output.getOutput()**2) * self.output.getNetDelta())
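
The backward pass relies on the identity tanh'(x) = 1 - tanh(x)^2; a quick finite-difference check of that identity, independent of the port classes:

import numpy as np

x = np.linspace(-3.0, 3.0, 7)
eps = 1e-6
analytic = 1.0 - np.tanh(x) ** 2                              # gradient used in backward()
numeric = (np.tanh(x + eps) - np.tanh(x - eps)) / (2 * eps)   # central difference
assert np.allclose(analytic, numeric, atol=1e-8)
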
class AbsoluteValueLayer(AbstractLayer):
   """
   A layer which implements absolute value activation
   """

   def __init__(self, layerSize):
      """
      An absolute value layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # An absolute value layer has an input port and an output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using absolute value
      """

      # Perform the activation (absolute value)
      self.output.setOutput(np.abs(self.input.getNetInput()))


   def backward(self):
      """
      Perform a backprop step - gradient is the sign of the output
      """
            
      self.input.setDelta(np.sign(self.output.getOutput()) * self.output.getNetDelta())
Example #4
class SigmoidLayer(AbstractLayer):
   """
   A layer which implements sigmoid activation
   """

   def __init__(self, layerSize):
      """
      A sigmoid layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A sigmoid layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using logistic function
      """

      # Perform the activation (logistic function)
      self.output.setOutput(1.0 / (1.0 + gpu.exp(-self.input.getNetInput())))


   def backward(self):
      """
      Perform a backprop step - gradient is the derivative of the sigmoid functon
      """
            
      self.input.setDelta(self.output.getOutput() * (1.0 - self.output.getOutput()) * self.output.getNetDelta())
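
Here gpu is presumably gnumpy, a GPU-backed NumPy work-alike; the same activation and gradient in plain NumPy, as a minimal standalone sketch:

import numpy as np

def sigmoid(x):
    # Logistic function: 1 / (1 + e^(-x))
    return 1.0 / (1.0 + np.exp(-x))

x = np.array([-1.0, 0.0, 2.0])
y = sigmoid(x)
grad = y * (1.0 - y)   # the derivative used in backward(): s'(x) = s(x) * (1 - s(x))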
Example #5
class ReluLayer(AbstractLayer):
    """
   A layer which implements rectified linear activation
   """
    def __init__(self, layerSize):
        """
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A ReLU layer has an input port and output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

    def forward(self):
        """
      Perform a forward step - activate the net input using the soft ReLU function
      """

        # Perform the activation (set any negative values to zero)
        self.output.setOutput(np.fmax(0.0, self.input.getNetInput()))

    def backward(self):
        """
      Perform a backprop step - gradient is simply 1 where the data is positive
      """

        self.input.setDelta(
            np.where(self.output.getOutput() > 0, 1.0, 0.0) *
            self.output.getNetDelta())
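
The same forward/backward arithmetic in isolation (plain NumPy; the upstream delta is an arbitrary placeholder):

import numpy as np

x = np.array([-1.5, 0.0, 2.0])
y = np.fmax(0.0, x)                            # forward: max(0, x)
upstream = np.array([0.1, 0.2, 0.3])           # hypothetical incoming delta
delta = np.where(y > 0, 1.0, 0.0) * upstream   # backward: delta flows only where active
print(y, delta)   # [0. 0. 2.] [0.  0.  0.3]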
Example #6
class ReluLayer(AbstractLayer):
   """
   A layer which implements rectified linear activation
   """

   def __init__(self, layerSize):
      """
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A ReLU layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using the soft ReLU function
      """

      # Perform the activation (set any negative values to zero)
      self.output.setOutput(np.fmax(0.0, self.input.getNetInput()))


   def backward(self):
      """
      Perform a backprop step - gradient is simply 1 where the data is positive
      """
            
      self.input.setDelta(np.where(self.output.getOutput() > 0, 1.0, 0.0) * self.output.getNetDelta())
Example #7
class SoftmaxLayer(AbstractLayer):
    """
   A layer which implements sigmoid activation
   """
    def __init__(self, layerSize):
        """
      A sigmoid layer can be connected to several inputs
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A softmax layer has an input port and an output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

    def forward(self):
        """
      Perform a forward step - activate the net input using logistic function
      """

        # Perform the activation
        self.output.setOutput(gpu.exp(self.input.getNetInput()))
        self.output.setOutput(
            self.output.getOutput() /
            (gpu.garray([gpu.sum(self.output.getOutput(), 1)]).transpose()))

    def backward(self):
        """
      Perform a backprop step - gradient is the derivative of the sigmoid functon
      """

        #      self.input.setDelta(self.output.getOutput() * (1.0 - self.output.getOutput()) * self.output.getNetDelta())
        self.input.setDelta(self.output.getNetDelta())
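
The pass-through backward() makes sense when the softmax feeds a cross-entropy cost: the combined gradient with respect to the logits collapses to output minus target, so the cost layer can hand that difference down as the net delta and the softmax Jacobian never needs to be applied explicitly. A standalone NumPy illustration (the array values are made up):

import numpy as np

logits = np.array([[1.0, 2.0, 0.5]])
target = np.array([[0.0, 1.0, 0.0]])   # one-hot label

# Numerically stable softmax: subtract the row max before exponentiating
shifted = logits - logits.max(axis=1, keepdims=True)
probs = np.exp(shifted) / np.exp(shifted).sum(axis=1, keepdims=True)

grad_logits = probs - target   # softmax + cross-entropy gradient in one step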
Example #8
class DelayLayer(AbstractLayer):
   """
   A layer which implements a delay in time
   """

   def __init__(self, layerSize, initialHistory):
      """
      A delay layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A delay layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)

      # A delay layer has a history, which propagates forward
      # when step is called
      self.initial_history = initialHistory
      self.history = np.zeros((1,layerSize))
      self.current_step = 0


   def forward(self):
      """
      Perform a forward step - set the output to the current history
      """

      # Is this the first timestep?  Then adjust the shape of history to match
      # the shape of the input
      if self.current_step == 0:
         net_input = self.input.getNetInput()
         self.history = np.zeros(net_input.shape)
         self.history[:] = self.initial_history

      # Propagate the history forward, and set the output to the history
      self.output.setOutput(self.history)


   def backward(self):
      """
      Perform the backprop step - simply shift the delta backward
      """

      self.input.setDelta(self.output.getNetDelta())

   def step(self):
      """
      Step forward in time.  Set the history to the current input
      """

      self.history = self.input.getNetInput()
      self.current_step += 1
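
To make the timing concrete, here is how the history evolves over a few timesteps, sketched with a stripped-down stand-in class (the InputPort/OutputPort plumbing is omitted):

import numpy as np

class TinyDelay:
    """Minimal stand-in: forward() emits what step() stored on the previous timestep."""
    def __init__(self, initial_history):
        self.history = np.asarray(initial_history, dtype=float)

    def forward(self):
        return self.history           # output the previous timestep's input

    def step(self, net_input):
        self.history = net_input      # capture the current input for the next step

d = TinyDelay([0.0])
for x in [np.array([1.0]), np.array([2.0]), np.array([3.0])]:
    print(d.forward())                # prints [0.], then [1.], then [2.]
    d.step(x)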
Example #9
class DelayLayer(AbstractLayer):
    """
   A layer which implements a delay in time
   """
    def __init__(self, layerSize, initialHistory):
        """
      A delay layer can be connected to several inputs
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A delay layer has an input port and output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

        # A delay layer has a history, which propagates forward
        # when step is called
        self.initial_history = initialHistory
        self.history = np.zeros((1, layerSize))
        self.current_step = 0

    def forward(self):
        """
      Perform a forward step - set the output to the current history
      """

        # Is this the first timestep?  Then adjust the shape of history to match
        # the shape of the input
        if self.current_step == 0:
            net_input = self.input.getNetInput()
            self.history = np.zeros(net_input.shape)
            self.history[:] = self.initial_history

        # Propagate the history forward, and set the output to the history
        self.output.setOutput(self.history)

    def backward(self):
        """
      Perform the backprop step - simply shift the delta backward
      """

        self.input.setDelta(self.output.getNetDelta())

    def step(self):
        """
      Step forward in time.  Set the history to the current input
      """

        self.history = self.input.getNetInput()
        self.current_step += 1
Example #10
class InputLayer(AbstractLayer):
   """
   An input layer
   """

   def __init__(self, inputSize):
      """
      Create an input layer, with batchSize rows and inputSize columns
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # This layer only has an output port.
      self.output = OutputPort(inputSize)


   def setInput(self, inputBatch):
      """
      Set the input to the provided batch
      """

      # Set the output of the output port to the provided batch
      self.output.setOutput(inputBatch)


   def forward(self):
      """
      Perform a forward step
      """

      # Since there's only the output port, there's nothing to do
      pass


   def backward(self):
      """
      Perform a backprop step
      """

      # Input layers have no need to backprop error -- there is nothing 
      # to backprop to
      pass
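
A minimal usage sketch for the class above; it assumes OutputPort.getOutput() returns whatever setOutput() stored, which is how every other example here uses the ports:

import numpy as np

batch = np.random.rand(32, 10)           # 32 samples, 10 features (made-up data)

layer = InputLayer(10)
layer.setInput(batch)                    # stage the batch on the output port
layer.forward()                          # no-op: the data is already on the port
activations = layer.output.getOutput()   # assumed to return the staged batch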
Example #11
class SoftmaxLayer(AbstractLayer):
   """
   A layer which implements sigmoid activation
   """

   def __init__(self, layerSize):
      """
      A sigmoid layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A softmax layer has an input port and an output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using logistic function
      """

      # Clip the net input to avoid overflow in the exponential; np.clip
      # returns a copy, so the port's underlying array is left untouched
      net_input = np.clip(self.input.getNetInput(), -10.0, 10.0)

      # Exponentiate (with a small epsilon to avoid zeros), then normalize
      # each row so the outputs sum to one
      self.output.setOutput(np.exp(net_input) + 1e-10)
      self.output.setOutput(self.output.getOutput() /
                            np.sum(self.output.getOutput(), axis=1, keepdims=True))


   def backward(self):
      """
      Perform a backprop step - gradient is the derivative of the sigmoid functon
      """
            
#      self.input.setDelta(self.output.getOutput() * (1.0 - self.output.getOutput()) * self.output.getNetDelta())
      self.input.setDelta(self.output.getNetDelta())
Example #12
class InputLayer(AbstractLayer):
    """
   An input layer
   """
    def __init__(self, inputSize):
        """
      Create an input layer, with batchSize rows and inputSize columns
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # This layer only has an output port.
        self.output = OutputPort(inputSize)

    def setInput(self, inputBatch):
        """
      Set the input to the provided batch
      """

        # Set the output of the output port to the provided batch
        self.output.setOutput(inputBatch)

    def forward(self):
        """
      Perform a forward step
      """

        # Since there's only the output port, there's nothing to do
        pass

    def backward(self):
        """
      Perform a backprop step
      """

        # Input layers have no need to backprop error -- there is nothing
        # to backprop to
        pass