Example #1
class TanhLayer(AbstractLayer):
    """
   A layer which implements hyperbolic tangent (tanh) activation
   """
    def __init__(self, layerSize):
        """
      A tanh layer can be connected to several inputs
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A tanh layer has an input port and an output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

    def forward(self):
        """
      Perform a forward step - activate the net input using the tanh function
      """

        # Perform the activation (tanh)
        self.output.setOutput(np.tanh(self.input.getNetInput()))

    def backward(self):
        """
      Perform a backprop step - gradient is the derivative of the tanh function
      """

        self.input.setDelta(
            (1.0 - self.output.getOutput()**2) * self.output.getNetDelta())
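
The forward/backward pair above can be sanity-checked outside the framework with a finite-difference test. A minimal sketch, assuming only NumPy (none of the AbstractLayer/port classes are needed for this check):

# Check that the tanh gradient used in backward() matches a numerical estimate.
import numpy as np

x = np.linspace(-3.0, 3.0, 7)
y = np.tanh(x)                    # what forward() sends to setOutput
analytic_grad = 1.0 - y**2        # factor applied to the net delta in backward()

eps = 1e-6
numeric_grad = (np.tanh(x + eps) - np.tanh(x - eps)) / (2.0 * eps)
print(np.allclose(analytic_grad, numeric_grad))   # True
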
Example #2
class SigmoidLayer(AbstractLayer):
   """
   A layer which implements sigmoid activation
   """

   def __init__(self, layerSize):
      """
      A sigmoid layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A sigmoid layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using the logistic function
      """

      # Perform the activation (logistic function)
      self.output.setOutput(1.0 / (1.0 + gpu.exp(-self.input.getNetInput())))


   def backward(self):
      """
      Perform a backprop step - gradient is the derivative of the sigmoid function
      """
            
      self.input.setDelta(self.output.getOutput() * (1.0 - self.output.getOutput()) * self.output.getNetDelta())
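
The same kind of check works for the logistic activation. The sketch below uses plain NumPy rather than the gpu module (presumably gnumpy) the example relies on, purely for portability:

# Verify the sigma * (1 - sigma) gradient used in backward() numerically.
import numpy as np

def logistic(x):
    return 1.0 / (1.0 + np.exp(-x))

x = np.linspace(-4.0, 4.0, 9)
sigma = logistic(x)                       # forward activation
analytic_grad = sigma * (1.0 - sigma)     # factor applied to the net delta

eps = 1e-6
numeric_grad = (logistic(x + eps) - logistic(x - eps)) / (2.0 * eps)
print(np.allclose(analytic_grad, numeric_grad))   # True
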
Example #3
class ReluLayer(AbstractLayer):
   """
   A layer which implements rectified linear activation
   """

   def __init__(self, layerSize):
      """
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A ReLU layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using the ReLU function
      """

      # Perform the activation (set any negative values to zero)
      self.output.setOutput(np.fmax(0.0, self.input.getNetInput()))


   def backward(self):
      """
      Perform a backprop step - gradient is simply 1 where the data is positive
      """
            
      self.input.setDelta(np.where(self.output.getOutput() > 0, 1.0, 0.0) * self.output.getNetDelta())
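
The ReLU backward pass simply gates the incoming delta by an indicator of where the output is positive. A small NumPy-only illustration of that behaviour (the values are made up):

# Forward and backward behaviour of the ReLU activation on a toy vector.
import numpy as np

net_input = np.array([-2.0, -0.5, 0.0, 0.5, 2.0])
output = np.fmax(0.0, net_input)                        # forward
net_delta = np.ones_like(output)                        # pretend delta from above
delta = np.where(output > 0, 1.0, 0.0) * net_delta      # backward: gate the delta
print(output)   # [0.  0.  0.  0.5 2. ]
print(delta)    # [0. 0. 0. 1. 1.]
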
Example #4
class AbsoluteValueLayer(AbstractLayer):
   """
   A layer which implements absolute value activation
   """

   def __init__(self, layerSize):
      """
      An absolute value layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # An absolute value layer has an input port and an output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using absolute value
      """

      # Perform the activation (absolute value)
      self.output.setOutput(np.abs(self.input.getNetInput()))


   def backward(self):
      """
      Perform a backprop step - gradient is the sign of the net input
      """
            
      self.input.setDelta(np.sign(self.input.getNetInput()) * self.output.getNetDelta())
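
Away from zero, the derivative of |x| is the sign of the input, which is what the backward pass above applies to the net delta. A quick NumPy-only finite-difference check:

# Confirm d|x|/dx = sign(x) at points where |x| is differentiable.
import numpy as np

x = np.array([-2.0, -0.5, 0.5, 2.0])
analytic_grad = np.sign(x)

eps = 1e-6
numeric_grad = (np.abs(x + eps) - np.abs(x - eps)) / (2.0 * eps)
print(np.allclose(analytic_grad, numeric_grad))   # True
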
Example #5
class AbsoluteValueLayer(AbstractLayer):
    """
   A layer which implements absolute value activation
   """
    def __init__(self, layerSize):
        """
      An absolute value layer can be connected to several inputs
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # An absolute value layer has an input port and an output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

    def forward(self):
        """
      Perform a forward step - activate the net input using absolute value
      """

        # Perform the activation (absolute value)
        self.output.setOutput(np.abs(self.input.getNetInput()))

    def backward(self):
        """
      Perform a backprop step - gradient is the sign of the net input
      """

        self.input.setDelta(
            np.sign(self.input.getNetInput()) * self.output.getNetDelta())
Example #6
class ReluLayer(AbstractLayer):
    """
   A layer which implements rectified linear activation
   """
    def __init__(self, layerSize):
        """
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A ReLU layer has an input port and output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

    def forward(self):
        """
      Perform a forward step - activate the net input using the ReLU function
      """

        # Perform the activation (set any negative values to zero)
        self.output.setOutput(np.fmax(0.0, self.input.getNetInput()))

    def backward(self):
        """
      Perform a backprop step - gradient is simply 1 where the data is positive
      """

        self.input.setDelta(
            np.where(self.output.getOutput() > 0, 1.0, 0.0) *
            self.output.getNetDelta())
Example #7
    def __init__(self, inputSize):
        """
      Create an input layer, with batchSize rows and inputSize columns
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # This layer only has an output port.
        self.output = OutputPort(inputSize)
Example #8
    def __init__(self, layerSize):
        """
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A ReLU layer has an input port and output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)
Example #9
    def __init__(self, layerSize):
        """
      An absolute value layer can be connected to several inputs
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # An absolute value layer has an input port and an output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)
Example #10
class DelayLayer(AbstractLayer):
   """
   A layer which implements a delay in time
   """

   def __init__(self, layerSize, initialHistory):
      """
      A delay layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A delay layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)

      # A delay layer has a history, which propagates forward
      # when step is called
      self.initial_history = initialHistory
      self.history = np.zeros((1,layerSize))
      self.current_step = 0


   def forward(self):
      """
      Perform a forward step - set the output to the current history
      """

      # Is this the first timestep?  Then adjust the shape of history to match
      # the shape of the input
      if self.current_step == 0:
         net_input = self.input.getNetInput()
         self.history = np.zeros(net_input.shape)
         self.history[:] = self.initial_history

      # Propagate the history forward, and set the output to the history
      self.output.setOutput(self.history)


   def backward(self):
      """
      Perform the backprop step - simply shift the delta backward
      """

      self.input.setDelta(self.output.getNetDelta())

   def step(self):
      """
      Step forward in time.  Set the history to the current input
      """

      self.history = self.input.getNetInput()
      self.current_step += 1
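
The timing of the DelayLayer is easiest to see in isolation: the output at step t is the input from step t-1, with initialHistory emitted on the first step. A minimal NumPy-only sketch of that behaviour, using made-up input values:

# Simulate forward()/step() of a one-step delay without the port classes.
import numpy as np

initial_history = 0.0
inputs = [np.array([[1.0, 2.0]]), np.array([[3.0, 4.0]]), np.array([[5.0, 6.0]])]

history = np.full_like(inputs[0], initial_history)
for t, net_input in enumerate(inputs):
    output = history          # forward(): the output is the stored history
    print(t, output)
    history = net_input       # step(): the history becomes the current input
# prints [[0. 0.]], [[1. 2.]], [[3. 4.]] for steps 0, 1, 2
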
Example #11
class DelayLayer(AbstractLayer):
    """
   A layer which implements a delay in time
   """
    def __init__(self, layerSize, initialHistory):
        """
      A delay layer can be connected to several inputs
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A delay layer has an input port and output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

        # A delay layer has a history, which propagates forward
        # when step is called
        self.initial_history = initialHistory
        self.history = np.zeros((1, layerSize))
        self.current_step = 0

    def forward(self):
        """
      Perform a forward step - set the output to the current history
      """

        # Is this the first timestep?  Then adjust the shape of history to match
        # the shape of the input
        if self.current_step == 0:
            net_input = self.input.getNetInput()
            self.history = np.zeros(net_input.shape)
            self.history[:] = self.initial_history

        # Propagate the history forward, and set the output to the history
        self.output.setOutput(self.history)

    def backward(self):
        """
      Perform the backprop step - simply shift the delta backward
      """

        self.input.setDelta(self.output.getNetDelta())

    def step(self):
        """
      Step forward in time.  Set the history to the current input
      """

        self.history = self.input.getNetInput()
        self.current_step += 1
Example #12
    def __init__(self, size, initialHistory=np.zeros((0, 0))):
        """
      Create a History layer
      """

        AbstractLayer.__init__(self)
        self.layerSize = size

        self.input = InputPort(self.layerSize)
        self.output = OutputPort(self.layerSize)

        self.history = []

        self.output.value = np.copy(initialHistory)
        self.initialHistory = initialHistory
Example #13
    def __init__(self, size, initialHistory=gpu.zeros((0, 0))):
        """
      Create a History layer
      """

        AbstractLayer.__init__(self)
        self.layerSize = size

        self.input = InputPort(self.layerSize)
        self.output = OutputPort(self.layerSize)

        self.history = []

        self.output.value = gpu.garray(np.copy(
            initialHistory.as_numpy_array()))
        self.initialHistory = initialHistory
Example #14
    def __init__(self, layerSize, initialHistory):
        """
      A delay layer can be connected to several inputs
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A delay layer has an input port and output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

        # A delay layer has a history, which propagates forward
        # when step is called
        self.initial_history = initialHistory
        self.history = np.zeros((1, layerSize))
        self.current_step = 0
Example #15
class SoftmaxLayer(AbstractLayer):
    """
   A layer which implements softmax activation
   """
    def __init__(self, layerSize):
        """
      A softmax layer can be connected to several inputs
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A softmax layer has an input port and an output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

    def forward(self):
        """
      Perform a forward step - activate the net input using the softmax function
      """

        # Perform the activation
        self.output.setOutput(gpu.exp(self.input.getNetInput()))
        self.output.setOutput(
            self.output.getOutput() /
            (gpu.garray([gpu.sum(self.output.getOutput(), 1)]).transpose()))

    def backward(self):
        """
      Perform a backprop step - pass the net delta through to the input unchanged
      """

        #      self.input.setDelta(self.output.getOutput() * (1.0 - self.output.getOutput()) * self.output.getNetDelta())
        self.input.setDelta(self.output.getNetDelta())
Example #16
   def __init__(self, layerSize):
      """
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A sigmoid layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)
Example #17
class InputLayer(AbstractLayer):
   """
   An input layer
   """

   def __init__(self, inputSize):
      """
      Create an input layer, with batchSize rows and inputSize columns
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # This layer only has an output port.
      self.output = OutputPort(inputSize)


   def setInput(self, inputBatch):
      """
      Set the input to the provided batch
      """

      # Set the output of the output port to the provided batch
      self.output.setOutput(inputBatch)


   def forward(self):
      """
      Perform a forward step
      """

      # Since there's only the output port, there's nothing to do
      pass


   def backward(self):
      """
      Perform a backprop step
      """

      # Input layers have no need to backprop error -- there is nothing 
      # to backprop to
      pass
Example #18
   def __init__(self, inputSize):
      """
      Create an input layer, with batchSize rows and inputSize columns
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # This layer only has an output port.
      self.output = OutputPort(inputSize)
Example #19
   def __init__(self, layerSize):
      """
      An absolute value layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # An absolute value layer has an input port and an output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)
Example #20
class InputLayer(AbstractLayer):
    """
   An input layer
   """
    def __init__(self, inputSize):
        """
      Create an input layer, with batchSize rows and inputSize columns
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # This layer only has an output port.
        self.output = OutputPort(inputSize)

    def setInput(self, inputBatch):
        """
      Set the input to the provided batch
      """

        # Set the output of the output port to the provided batch
        self.output.setOutput(inputBatch)

    def forward(self):
        """
      Perform a forward step
      """

        # Since there's only the output port, there's nothing to do
        pass

    def backward(self):
        """
      Perform a backprop step
      """

        # Input layers have no need to backprop error -- there is nothing
        # to backprop to
        pass
Example #21
   def __init__(self, size, initialHistory=gpu.zeros((0,0))):
      """
      Create a History layer
      """

      AbstractLayer.__init__(self)
      self.layerSize = size

      self.input = InputPort(self.layerSize)
      self.output = OutputPort(self.layerSize)

      self.history = []

      self.output.value = gpu.garray(np.copy(initialHistory.as_numpy_array()))
      self.initialHistory = initialHistory
Example #22
   def __init__(self, size, initialHistory=np.zeros((0,0))):
      """
      Create a History layer
      """

      AbstractLayer.__init__(self)
      self.layerSize = size

      self.input = InputPort(self.layerSize)
      self.output = OutputPort(self.layerSize)

      self.history = []

      self.output.value = np.copy(initialHistory)
      self.initialHistory = initialHistory
Example #23
   def __init__(self, inputSize, outputSizes):
      """
      Create a layer which splits the input into the provided output sizes
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # Should probably have an assertion that the output sizes add up to the
      # input size

      # A split layer has an input port and several output ports
      self.input = InputPort(inputSize)
      self.outputPorts = []
      for size in outputSizes:
         self.outputPorts.append(OutputPort(size))
Example #24
   def __init__(self, layerSize, initialHistory):
      """
      A delay layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A delay layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)

      # A delay layer has a history, which propagates forward
      # when step is called
      self.initial_history = initialHistory
      self.history = np.zeros((1,layerSize))
      self.current_step = 0
Example #25
class SoftmaxLayer(AbstractLayer):
   """
   A layer which implements softmax activation
   """

   def __init__(self, layerSize):
      """
      A softmax layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A softmax layer has an input port and an output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using the softmax function
      """

      # Perform the activation
      net_input = self.input.getNetInput()
      net_input[net_input > 10.0] = 10.0
      net_input[net_input < -10.0] = -10.0

      self.output.setOutput(np.exp(net_input) + 1e-10)
      self.output.setOutput(self.output.getOutput() / (np.array([np.sum(self.output.getOutput(),1)]).transpose()))


   def backward(self):
      """
      Perform a backprop step - pass the net delta through to the input unchanged
      """
            
#      self.input.setDelta(self.output.getOutput() * (1.0 - self.output.getOutput()) * self.output.getNetDelta())
      self.input.setDelta(self.output.getNetDelta())
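
The forward pass above clips the net input to [-10, 10], exponentiates, and normalizes each row; the pass-through backward is the usual arrangement when softmax feeds a cross-entropy cost, although that pairing is an assumption here. A NumPy-only check that the normalized output rows sum to one:

# Reproduce the clipping / exponentiation / row normalization on toy data.
import numpy as np

net_input = np.array([[2.0, -1.0, 30.0],
                      [0.5, 0.5, -50.0]])
clipped = np.clip(net_input, -10.0, 10.0)
out = np.exp(clipped) + 1e-10
out = out / np.array([np.sum(out, 1)]).transpose()
print(np.allclose(np.sum(out, 1), 1.0))   # True
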
Example #26
class HistoryLayer(AbstractLayer):
    """
   A useful internal layer for Recurrent Layers which maintains a history
   of activations.
   """
    def __init__(self, size, initialHistory=np.zeros((0, 0))):
        """
      Create a History layer
      """

        AbstractLayer.__init__(self)
        self.layerSize = size

        self.input = InputPort(self.layerSize)
        self.output = OutputPort(self.layerSize)

        self.history = []

        self.output.value = np.copy(initialHistory)
        self.initialHistory = initialHistory

    def forward(self):
        """
      Do nothing.  step handles this layer correctly
      """

        pass

    def backward(self):
        """
      Do nothing.  backstep handles this layer correctly
      """

        pass

    def step(self):
        """
      Push the current output into the history, and propagate input forward
      """

        self.history.append(self.output.value[:])
        self.output.value = self.input.getNetInput()

    def backstep(self):
        """
      Pop the output from the history, and propagate the delta backward
      """

        self.input.setDelta(self.output.getNetDelta())
        self.output.value = self.history.pop()

    def reset(self):
        """
      Reset the history to empty and output to initialHistory
      """

        self.history = []
        self.output.value[:] = self.initialHistory

    def setDelta(self, delta):
        """
      Set the delta on the input layer to the provided value
      """

        self.input.setDelta(delta)
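
step() and backstep() give the HistoryLayer stack semantics: activations are pushed while stepping forward through time and popped in reverse order during backprop. A plain Python/NumPy sketch of that bookkeeping, outside the framework:

# Mimic the push/pop behaviour of step() and backstep() with a list.
import numpy as np

history = []
value = np.zeros(2)                        # plays the role of initialHistory

for t in range(3):                         # forward through time (step)
    history.append(value.copy())
    value = np.array([t + 1.0, t + 2.0])   # pretend net input at step t

while history:                             # backward through time (backstep)
    value = history.pop()
    print(value)
# prints [2. 3.], then [1. 2.], then [0. 0.]
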
Example #27
class HistoryLayer(AbstractLayer):
   """
   A useful internal layer for Recurrent Layers which maintains a history
   of activations.
   """

   def __init__(self, size, initialHistory=gpu.zeros((0,0))):
      """
      Create a History layer
      """

      AbstractLayer.__init__(self)
      self.layerSize = size

      self.input = InputPort(self.layerSize)
      self.output = OutputPort(self.layerSize)

      self.history = []

      self.output.value = gpu.garray(np.copy(initialHistory.as_numpy_array()))
      self.initialHistory = initialHistory


   def forward(self):
      """
      Do nothing.  step handles this layer correctly
      """
  
      pass


   def backward(self):
      """
      Do nothing.  backstep handles this layer correctly
      """

      pass


   def step(self):
      """
      Push the current output into the history, and propagate input forward
      """

      self.history.append(self.output.value[:])
      self.output.value = self.input.getNetInput()


   def backstep(self):
      """
      Pop the output from the history, and propagate the delta backward
      """

      self.input.setDelta(self.output.getNetDelta())
      self.output.value = self.history.pop()


   def reset(self):
      """
      Reset the history to empty and output to initialHistory
      """

      self.history = []
      self.output.value[:] = self.initialHistory


   def setDelta(self, delta):
      """
      Set the delta on the input layer to the provided value
      """

      self.input.setDelta(delta)