Example #1
class SigmoidLayer(AbstractLayer):
   """
   A layer which implements sigmoid activation
   """

   def __init__(self, layerSize):
      """
      A sigmoid layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A sigmoid layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using logistic function
      """

      # Perform the activation (logistic function)
      self.output.setOutput(1.0 / (1.0 + gpu.exp(-self.input.getNetInput())))


   def backward(self):
      """
      Perform a backprop step - gradient is the derivative of the sigmoid function
      """
            
      self.input.setDelta(self.output.getOutput() * (1.0 - self.output.getOutput()) * self.output.getNetDelta())
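The backward step relies on the identity that the derivative of the logistic function can be written in terms of its own output, sigmoid(x) * (1 - sigmoid(x)). A minimal NumPy check of that identity against a finite difference (standalone, since AbstractLayer and the port classes are assumed to come from the surrounding framework):

import numpy as np

x = np.linspace(-5.0, 5.0, 11)
out = 1.0 / (1.0 + np.exp(-x))

# Gradient expression used by SigmoidLayer.backward
analytic = out * (1.0 - out)

# Central finite difference of the logistic function for comparison
eps = 1e-5
numeric = (1.0 / (1.0 + np.exp(-(x + eps))) -
           1.0 / (1.0 + np.exp(-(x - eps)))) / (2.0 * eps)

assert np.allclose(analytic, numeric)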
Example #2
class TanhLayer(AbstractLayer):
    """
   A layer which implements sigmoid activation
   """
    def __init__(self, layerSize):
        """
      A sigmoid layer can be connected to several inputs
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A tanh layer has an input port and an output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

    def forward(self):
        """
      Perform a forward step - activate the net input using logistic function
      """

        # Perform the activation (logistic function)
        self.output.setOutput((1.0 - np.exp(-self.input.getNetInput())) /
                              (1.0 + np.exp(-self.input.getNetInput())))

    def backward(self):
        """
      Perform a backprop step - gradient is the derivative of the sigmoid functon
      """

        self.input.setDelta(
            (1.0 - self.output.getOutput()**2) * self.output.getNetDelta())
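The backward step again uses the closed-form derivative of the activation, 1 - tanh(x)**2, expressed in terms of the layer's own output. A standalone NumPy sanity check against a finite difference:

import numpy as np

x = np.linspace(-3.0, 3.0, 13)
out = np.tanh(x)

# Gradient expression used by TanhLayer.backward
analytic = 1.0 - out ** 2

# Central finite difference for comparison
eps = 1e-5
numeric = (np.tanh(x + eps) - np.tanh(x - eps)) / (2.0 * eps)

assert np.allclose(analytic, numeric)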
Example #3
class ReluLayer(AbstractLayer):
   """
   A layer which implements rectified linear activation
   """

   def __init__(self, layerSize):
      """
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A ReLU layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using the rectified linear (ReLU) function
      """

      # Perform the activation (set any negative values to zero)
      self.output.setOutput(np.fmax(0.0, self.input.getNetInput()))


   def backward(self):
      """
      Perform a backprop step - gradient is simply 1 where the data is positive
      """
            
      self.input.setDelta(np.where(self.output.getOutput() > 0, 1.0, 0.0) * self.output.getNetDelta())
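The forward and backward expressions can be exercised on plain arrays; this sketch mirrors the np.fmax / np.where calls above, with a NumPy array standing in for the port objects (which are assumed to belong to the surrounding framework):

import numpy as np

net_input = np.array([[-2.0, -0.5, 0.0, 0.5, 2.0]])
net_delta = np.ones_like(net_input)

# Forward: negative values are clamped to zero
output = np.fmax(0.0, net_input)                    # [[0. 0. 0. 0.5 2.]]

# Backward: the gradient is 1 wherever the output is positive, 0 elsewhere
delta = np.where(output > 0, 1.0, 0.0) * net_delta  # [[0. 0. 0. 1. 1.]]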
Example #4
class AbsoluteValueLayer(AbstractLayer):
   """
   A layer which implements absolute value activation
   """

   def __init__(self, layerSize):
      """
      An absolute value layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # An absolute value layer has an input port and an output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using absolute value
      """

      # Perform the activation (absolute value)
      self.output.setOutput(np.abs(self.input.getNetInput()))


   def backward(self):
      """
      Perform a backprop step - gradient is the sign of the net input
      """

      self.input.setDelta(np.sign(self.input.getNetInput()) * self.output.getNetDelta())
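Since the derivative of |x| is the sign of x, the delta has to use the sign of the net input; the output is never negative, so taking the sign of the output would silently drop the negative branch. A small NumPy illustration:

import numpy as np

net_input = np.array([[-2.0, -0.5, 0.5, 2.0]])
output = np.abs(net_input)

grad_from_input = np.sign(net_input)   # [[-1. -1.  1.  1.]] - the true d|x|/dx away from 0
grad_from_output = np.sign(output)     # [[ 1.  1.  1.  1.]] - loses the sign of negative inputs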
Example #5
class AbsoluteValueLayer(AbstractLayer):
    """
    A layer which implements absolute value activation
    """
    def __init__(self, layerSize):
        """
        An absolute value layer can be connected to several inputs
        """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # An absolute value layer has an input port and an output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

    def forward(self):
        """
      Perform a forward step - activate the net input using absolute value
      """

        # Perform the activation (logistic function)
        self.output.setOutput(np.abs(self.input.getNetInput()))

    def backward(self):
        """
      Perform a backprop step - gradient is the sign of the output
      """

        self.input.setDelta(
            np.sign(self.output.getOutput()) * self.output.getNetDelta())
Example #6
class ReluLayer(AbstractLayer):
    """
   A layer which implements rectified linear activation
   """
    def __init__(self, layerSize):
        """
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A ReLU layer has an input port and output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

    def forward(self):
        """
      Perform a forward step - activate the net input using the soft ReLU function
      """

        # Perform the activation (set any negative values to zero)
        self.output.setOutput(np.fmax(0.0, self.input.getNetInput()))

    def backward(self):
        """
      Perform a backprop step - gradient is simply 1 where the data is positive
      """

        self.input.setDelta(
            np.where(self.output.getOutput() > 0, 1.0, 0.0) *
            self.output.getNetDelta())
Example #7
class SoftmaxLayer(AbstractLayer):
    """
   A layer which implements sigmoid activation
   """
    def __init__(self, layerSize):
        """
      A sigmoid layer can be connected to several inputs
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A softmax layer has an input port and an output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

    def forward(self):
        """
      Perform a forward step - activate the net input using logistic function
      """

        # Perform the activation
        self.output.setOutput(gpu.exp(self.input.getNetInput()))
        self.output.setOutput(
            self.output.getOutput() /
            (gpu.garray([gpu.sum(self.output.getOutput(), 1)]).transpose()))

    def backward(self):
        """
      Perform a backprop step - gradient is the derivative of the sigmoid functon
      """

        #      self.input.setDelta(self.output.getOutput() * (1.0 - self.output.getOutput()) * self.output.getNetDelta())
        self.input.setDelta(self.output.getNetDelta())
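Passing the delta straight through is the usual shortcut when the softmax output feeds a cross-entropy cost: the combined gradient with respect to the net input collapses to (probabilities - targets), so the cost layer can compute the whole thing. A standalone NumPy illustration of that assumption:

import numpy as np

net_input = np.array([[1.0, 2.0, 0.5]])
targets = np.array([[0.0, 1.0, 0.0]])

# Softmax forward pass
probs = np.exp(net_input)
probs /= probs.sum(axis=1, keepdims=True)

# For softmax + cross-entropy, d(cost)/d(net_input) = probs - targets,
# which is exactly the delta the cost layer would hand to this layer
delta = probs - targets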
Example #8
    def __init__(self, layerSize):
        """
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A ReLU layer has an input port and output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)
Example #9
    def __init__(self, layerSize):
        """
        An absolute value layer can be connected to several inputs
        """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # An absolute value layer has an input port and an output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)
Example #10
class DelayLayer(AbstractLayer):
   """
   A layer which implements a delay in time
   """

   def __init__(self, layerSize, initialHistory):
      """
      A delay layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A delay layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)

      # A delay layer has a history, which propagates forward
      # when step is called
      self.initial_history = initialHistory
      self.history = np.zeros((1,layerSize))
      self.current_step = 0


   def forward(self):
      """
      Perform a forward step - set the output to the current history
      """

      # Is this the first timestep?  Then adjust the shape of history to match
      # the shape of the input
      if self.current_step == 0:
         net_input = self.input.getNetInput()
         self.history = np.zeros(net_input.shape)
         self.history[:] = self.initial_history

      # Propagate the history forward by setting the output to the stored history
      self.output.setOutput(self.history)


   def backward(self):
      """
      Perform the backprop step - simply shift the delta backward
      """

      self.input.setDelta(self.output.getNetDelta())

   def step(self):
      """
      Step forward in time.  Set the history to the current input
      """

      self.history = self.input.getNetInput()
      self.current_step += 1
Example #11
class DelayLayer(AbstractLayer):
    """
   A layer which implements a delay in time
   """
    def __init__(self, layerSize, initialHistory):
        """
      A delay layer can be connected to several inputs
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A delay layer has an input port and output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

        # A delay layer has a history, which propagates forward
        # when step is called
        self.initial_history = initialHistory
        self.history = np.zeros((1, layerSize))
        self.current_step = 0

    def forward(self):
        """
      Perform a forward step - set the output to the current history
      """

        # Is this the first timestep?  Then adjust the shape of history to match
        # the shape of the input
        if self.current_step == 0:
            net_input = self.input.getNetInput()
            self.history = np.zeros(net_input.shape)
            self.history[:] = self.initial_history

        # Propagate the history forward by setting the output to the stored history
        self.output.setOutput(self.history)

    def backward(self):
        """
      Perform the backprop step - simply shift the delta backward
      """

        self.input.setDelta(self.output.getNetDelta())

    def step(self):
        """
      Step forward in time.  Set the history to the current input
      """

        self.history = self.input.getNetInput()
        self.current_step += 1
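The intended behaviour is a one-step delay: the output at time t is whatever arrived at time t-1, with initialHistory emitted at t=0. A minimal NumPy sketch of the forward/step cycle, with plain arrays standing in for the ports:

import numpy as np

initial_history = 0.0
inputs = [np.array([[1.0]]), np.array([[2.0]]), np.array([[3.0]])]

history = np.full(inputs[0].shape, initial_history)
for t, net_input in enumerate(inputs):
    output = history      # forward: emit what was stored on the previous step
    history = net_input   # step: remember the current input for the next step
    print(t, output)      # t=0 -> initial history, t=1 -> first input, t=2 -> second input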
Example #12
class SplitLayer(AbstractLayer):
   """
   A layer which splits an input port into multiple output ports
   """

   def __init__(self, inputSize, outputSizes):
      """
      Create a layer which splits the input into the provided output sizes
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # The output sizes must add up to the input size
      assert sum(outputSizes) == inputSize

      # A split layer has one input port and multiple output ports
      self.input = InputPort(inputSize)
      self.outputPorts = []
      for size in outputSizes:
         self.outputPorts.append(OutputPort(size))


   def forward(self):
      """
      Perform a forward step - split the input to the various outputs
      """

      # We'll iterate through the ports, splitting the input among them
      idx = 0

      for port in self.outputPorts:
         port.setOutput(self.input.getNetInput()[:,idx:idx+port.size])
         idx += port.size


   def backward(self):
      """
      Perform a backprop step - join the net deltas together to get the input delta
      """

      # We'll iterate through the output ports, getting each delta
      deltas = np.zeros(self.input.getNetInput().shape)
      idx = 0

      for port in self.outputPorts:
         deltas[:,idx:idx+port.size] = port.getNetDelta()
         idx += port.size
            
      self.input.setDelta(deltas)
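Forward slices the input column-wise into blocks of the requested sizes, and backward (with the index advanced inside the loop) stitches the per-port deltas back into a single array. A standalone NumPy sketch of both directions, where port.size is taken to be the width handed to each OutputPort:

import numpy as np

net_input = np.arange(12.0).reshape(2, 6)   # batch of 2 rows, 6 columns
output_sizes = [2, 3, 1]

# Forward: split the columns among the outputs
idx = 0
pieces = []
for size in output_sizes:
    pieces.append(net_input[:, idx:idx + size])
    idx += size

# Backward: reassemble the per-output deltas into one input-sized delta
deltas = np.zeros(net_input.shape)
idx = 0
for size, piece in zip(output_sizes, pieces):
    deltas[:, idx:idx + size] = piece
    idx += size

assert np.array_equal(deltas, net_input)    # the round trip recovers the original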
Example #13
    def __init__(self, size, initialHistory=np.zeros((0, 0))):
        """
      Create a History layer
      """

        AbstractLayer.__init__(self)
        self.layerSize = size

        self.input = InputPort(self.layerSize)
        self.output = OutputPort(self.layerSize)

        self.history = []

        self.output.value = np.copy(initialHistory)
        self.initialHistory = initialHistory
Example #14
    def __init__(self, size, initialHistory=gpu.zeros((0, 0))):
        """
      Create a History layer
      """

        AbstractLayer.__init__(self)
        self.layerSize = size

        self.input = InputPort(self.layerSize)
        self.output = OutputPort(self.layerSize)

        self.history = []

        self.output.value = gpu.garray(np.copy(
            initialHistory.as_numpy_array()))
        self.initialHistory = initialHistory
Example #15
   def __init__(self, inputSize, outputSizes):
      """
      Create a layer which splits the input into the provided output sizes
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # The output sizes must add up to the input size
      assert sum(outputSizes) == inputSize

      # A split layer has one input port and multiple output ports
      self.input = InputPort(inputSize)
      self.outputPorts = []
      for size in outputSizes:
         self.outputPorts.append(OutputPort(size))
Example #16
    def __init__(self, layerSize, initialHistory):
        """
      A delay layer can be connected to several inputs
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A delay layer has an input port and output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

        # A delay layer has a history, which propagates forward
        # when step is called
        self.initial_history = initialHistory
        self.history = np.zeros((1, layerSize))
        self.current_step = 0
Example #17
   def __init__(self, layerSize):
      """
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A sigmoid layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)
Example #18
   def __init__(self, layerSize):
      """
      An absolute value layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # An absolute value layer has an input port and an output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)
Example #19
class SoftmaxLayer(AbstractLayer):
   """
   A layer which implements sigmoid activation
   """

   def __init__(self, layerSize):
      """
      A softmax layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A softmax layer has an input port and an output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)


   def forward(self):
      """
      Perform a forward step - activate the net input using the softmax function
      """

      # Perform the activation, clipping the net input first so the
      # exponential does not overflow
      net_input = self.input.getNetInput()
      net_input[net_input > 10.0] = 10.0
      net_input[net_input < -10.0] = -10.0

      self.output.setOutput(np.exp(net_input) + 1e-10)
      self.output.setOutput(self.output.getOutput() / (np.array([np.sum(self.output.getOutput(),1)]).transpose()))


   def backward(self):
      """
      Perform a backprop step - the delta is passed through unchanged (assumes the
      softmax output feeds a cross-entropy cost that already includes the softmax derivative)
      """
            
#      self.input.setDelta(self.output.getOutput() * (1.0 - self.output.getOutput()) * self.output.getNetDelta())
      self.input.setDelta(self.output.getNetDelta())
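Clipping the net input to [-10, 10] is one way to keep np.exp from overflowing; a common alternative is to subtract the row-wise maximum before exponentiating, which leaves the softmax value unchanged. A hedged sketch of that variant:

import numpy as np

def stable_softmax(net_input):
    # Subtracting the per-row maximum does not change the result of the
    # softmax, but keeps the exponential's argument at or below zero
    shifted = net_input - net_input.max(axis=1, keepdims=True)
    exps = np.exp(shifted)
    return exps / exps.sum(axis=1, keepdims=True)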
Example #20
   def __init__(self, size, initialHistory=np.zeros((0,0))):
      """
      Create a History layer
      """

      AbstractLayer.__init__(self)
      self.layerSize = size

      self.input = InputPort(self.layerSize)
      self.output = OutputPort(self.layerSize)

      self.history = []

      self.output.value = np.copy(initialHistory)
      self.initialHistory = initialHistory
Example #21
   def __init__(self, size, initialHistory=gpu.zeros((0,0))):
      """
      Create a History layer
      """

      AbstractLayer.__init__(self)
      self.layerSize = size

      self.input = InputPort(self.layerSize)
      self.output = OutputPort(self.layerSize)

      self.history = []

      self.output.value = gpu.garray(np.copy(initialHistory.as_numpy_array()))
      self.initialHistory = initialHistory
Example #22
   def __init__(self, layerSize, initialHistory):
      """
      A delay layer can be connected to several inputs
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # A delay layer has an input port and output port
      self.input = InputPort(layerSize)
      self.output = OutputPort(layerSize)

      # A delay layer has a history, which propagates forward
      # when step is called
      self.initial_history = initialHistory
      self.history = np.zeros((1,layerSize))
      self.current_step = 0
Example #23
class HistoryLayer(AbstractLayer):
    """
   A useful internal layer for Recurrent Layers which maintains a history
   of activations.
   """
    def __init__(self, size, initialHistory=np.zeros((0, 0))):
        """
      Create a History layer
      """

        AbstractLayer.__init__(self)
        self.layerSize = size

        self.input = InputPort(self.layerSize)
        self.output = OutputPort(self.layerSize)

        self.history = []

        self.output.value = np.copy(initialHistory)
        self.initialHistory = initialHistory

    def forward(self):
        """
      Do nothing.  step handles this layer correctly
      """

        pass

    def backward(self):
        """
      Do nothing.  backstep handles this layer correctly
      """

        pass

    def step(self):
        """
      Push the current output into the history, and propagate input forward
      """

        self.history.append(self.output.value[:])
        self.output.value = self.input.getNetInput()

    def backstep(self):
        """
      Pop the output from the history, and propagate the delta backward
      """

        self.input.setDelta(self.output.getNetDelta())
        self.output.value = self.history.pop()

    def reset(self):
        """
      Reset the history to empty and output to initialHistory
      """

        self.history = []
        self.output.value[:] = self.initialHistory

    def setDelta(self, delta):
        """
      Set the delta on the input layer to the provided value
      """

        self.input.setDelta(delta)
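The history list behaves as a stack for backpropagation through time: step pushes the current output and moves the input forward, backstep pops it back while handing the delta to the input port. A minimal list-and-array sketch of that push/pop pattern, independent of the port classes:

import numpy as np

history = []
value = np.zeros((1, 3))           # plays the role of initialHistory

# Unrolled forward pass over a short sequence (what step does)
for net_input in (np.full((1, 3), t) for t in (1.0, 2.0, 3.0)):
    history.append(value)
    value = net_input

# Walk back through time (what backstep does), popping in reverse order
while history:
    value = history.pop()          # restores 2.0, then 1.0, then the initial zeros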
Example #24
class HistoryLayer(AbstractLayer):
   """
   A useful internal layer for Recurrent Layers which maintains a history
   of activations.
   """

   def __init__(self, size, initialHistory=gpu.zeros((0,0))):
      """
      Create a History layer
      """

      AbstractLayer.__init__(self)
      self.layerSize = size

      self.input = InputPort(self.layerSize)
      self.output = OutputPort(self.layerSize)

      self.history = []

      self.output.value = gpu.garray(np.copy(initialHistory.as_numpy_array()))
      self.initialHistory = initialHistory


   def forward(self):
      """
      Do nothing.  step handles this layer correctly
      """
  
      pass


   def backward(self):
      """
      Do nothing.  backstep handles this layer correctly
      """

      pass


   def step(self):
      """
      Push the current output into the history, and propagate input forward
      """

      self.history.append(self.output.value[:])
      self.output.value = self.input.getNetInput()


   def backstep(self):
      """
      Pop the output from the history, and propagate the delta backward
      """

      self.input.setDelta(self.output.getNetDelta())
      self.output.value = self.history.pop()


   def reset(self):
      """
      Reset the history to empty and output to initialHistory
      """

      self.history = []
      self.output.value[:] = self.initialHistory


   def setDelta(self, delta):
      """
      Set the delta on the input layer to the provided value
      """

      self.input.setDelta(delta)