def __init__(self,
             size,
             initialHistory=None,
             baseLayerClass=SigmoidLayer,
             connectionClass=FullConnection):
        """
        A recurrent layer extends the activation layer by adding a full
        recurrent connection from the output of the layer to its input,
        delayed by a timestep.

        size            -- number of units in this layer
        initialHistory  -- initial history array handed to the HistoryLayer;
                           defaults to an empty (0, 0) array
        baseLayerClass  -- activation layer class to wrap (default SigmoidLayer)
        connectionClass -- connection type for the recurrent link
        """

        # Avoid the shared-mutable-default pitfall: a np.zeros((0, 0)) default
        # would be created once at definition time and shared by every
        # instance; build a fresh array per construction instead.
        if initialHistory is None:
            initialHistory = np.zeros((0, 0))

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # Extract the layerSize from the provided activation layer
        self.baseLayer = baseLayerClass(size)
        self.layerSize = size
        self.historyLayer = HistoryLayer(self.layerSize, initialHistory)

        # A recurrent layer has an input port, history port and output port
        self.input = self.baseLayer.input
        self.output = self.baseLayer.output

        # Make two connections - the recurrent connection to the history and
        # a connection from the history to the activationLayer
        self.recurrentConnection = connectionClass(self.output,
                                                   self.historyLayer.input)
        self.historyConnection = IdentityConnection(self.historyLayer.output,
                                                    self.input)

        # Keep track of how many timesteps there were, and the initial
        # history in case of reset
        self.timestep = 0
   def __init__(self, size, initialHistory=None, baseLayerClass=SigmoidLayer, connectionClass=FullConnection):
      """
      A recurrent layer extends the activation layer by adding a full recurrent
      connection from the output of the layer to its input, delayed by a
      timestep.

      size            -- number of units in this layer
      initialHistory  -- initial history array handed to the HistoryLayer;
                         defaults to an empty (0, 0) gpu array
      baseLayerClass  -- activation layer class to wrap (default SigmoidLayer)
      connectionClass -- connection type for the recurrent link
      """

      # Avoid the shared-mutable-default pitfall: a gpu.zeros((0,0)) default
      # would be allocated once at definition time and shared by every
      # instance; allocate a fresh array per construction instead.
      if initialHistory is None:
         initialHistory = gpu.zeros((0, 0))

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # Extract the layerSize from the provided activation layer
      self.baseLayer = baseLayerClass(size)
      self.layerSize = size
      self.historyLayer = HistoryLayer(self.layerSize, initialHistory)

      # A recurrent layer has an input port, history port and output port
      self.input = self.baseLayer.input
      self.output = self.baseLayer.output

      # Make two connections - the recurrent connection to the history and a
      # connection from the history to the activationLayer
      self.recurrentConnection = connectionClass(self.output, self.historyLayer.input)
      self.historyConnection = IdentityConnection(self.historyLayer.output, self.input)

      # Keep track of how many timesteps there were, and the initial history
      # in case of reset
      self.timestep = 0
   def __init__(self, layerSize):
      """
      Build a sigmoid layer with layerSize units.
      """

      # Initialize the AbstractLayer base class
      AbstractLayer.__init__(self)

      # Expose matching input and output ports, each layerSize units wide
      self.input, self.output = InputPort(layerSize), OutputPort(layerSize)
    def __init__(self, layerSize):
        """
        Build a ReLU layer with layerSize units.
        """

        # Initialize the AbstractLayer base class
        AbstractLayer.__init__(self)

        # Expose matching input and output ports, each layerSize units wide
        self.input, self.output = InputPort(layerSize), OutputPort(layerSize)
   def __init__(self, inputSize):
      """
      Create an input layer, with batchSize rows and inputSize columns
      """

      # Initialize the AbstractLayer base class
      AbstractLayer.__init__(self)

      # Input layers have no input port -- only an output port that feeds
      # the rest of the network.
      self.output = OutputPort(inputSize)
# Example #6
# 0
    def __init__(self, inputSize):
        """
        Create an input layer, with batchSize rows and inputSize columns
        """

        # Initialize the AbstractLayer base class
        AbstractLayer.__init__(self)

        # Input layers have no input port -- only an output port that feeds
        # the rest of the network.
        self.output = OutputPort(inputSize)
    def __init__(self, layerSize):
        """
        An absolute value layer can be connected to several inputs
        """

        # Initialize the AbstractLayer base class
        AbstractLayer.__init__(self)

        # An absolute value layer exposes matching input and output ports,
        # each layerSize units wide
        self.input, self.output = InputPort(layerSize), OutputPort(layerSize)
   def __init__(self, layerSize):
      """
      An absolute value layer can be connected to several inputs
      """

      # Initialize the AbstractLayer base class
      AbstractLayer.__init__(self)

      # An absolute value layer exposes matching input and output ports,
      # each layerSize units wide
      self.input, self.output = InputPort(layerSize), OutputPort(layerSize)
    def __init__(self, size, initialHistory=None):
        """
        Create a History layer.

        size           -- number of units held at each timestep
        initialHistory -- array used as the initial output value; defaults
                          to an empty (0, 0) array
        """

        AbstractLayer.__init__(self)
        self.layerSize = size

        self.input = InputPort(self.layerSize)
        self.output = OutputPort(self.layerSize)

        # Stored activations, one entry per timestep
        self.history = []

        # Avoid the shared-mutable-default pitfall: the old np.zeros((0, 0))
        # default was created once at definition time and then aliased by
        # self.initialHistory on every instance.
        if initialHistory is None:
            initialHistory = np.zeros((0, 0))

        # Copy so later writes through the output port cannot corrupt the
        # caller-provided initial history
        self.output.value = np.copy(initialHistory)
        self.initialHistory = initialHistory
   def __init__(self, size, initialHistory=None):
      """
      Create a History layer.

      size           -- number of units held at each timestep
      initialHistory -- gpu array used as the initial output value; defaults
                        to an empty (0, 0) gpu array
      """

      AbstractLayer.__init__(self)
      self.layerSize = size

      self.input = InputPort(self.layerSize)
      self.output = OutputPort(self.layerSize)

      # Stored activations, one entry per timestep
      self.history = []

      # Avoid the shared-mutable-default pitfall: the old gpu.zeros((0,0))
      # default was allocated once at definition time and then aliased by
      # self.initialHistory on every instance.
      if initialHistory is None:
         initialHistory = gpu.zeros((0, 0))

      # Round-trip through numpy to get an independent gpu copy, so later
      # writes through the output port cannot corrupt the initial history
      self.output.value = gpu.garray(np.copy(initialHistory.as_numpy_array()))
      self.initialHistory = initialHistory
   def __init__(self, size, initialHistory=None):
      """
      Create a History layer.

      size           -- number of units held at each timestep
      initialHistory -- array used as the initial output value; defaults
                        to an empty (0, 0) array
      """

      AbstractLayer.__init__(self)
      self.layerSize = size

      self.input = InputPort(self.layerSize)
      self.output = OutputPort(self.layerSize)

      # Stored activations, one entry per timestep
      self.history = []

      # Avoid the shared-mutable-default pitfall: the old np.zeros((0,0))
      # default was created once at definition time and then aliased by
      # self.initialHistory on every instance.
      if initialHistory is None:
         initialHistory = np.zeros((0, 0))

      # Copy so later writes through the output port cannot corrupt the
      # caller-provided initial history
      self.output.value = np.copy(initialHistory)
      self.initialHistory = initialHistory
   def __init__(self, inputSize, outputSizes):
      """
      Create a layer which splits the input into the provided output sizes
      """

      # Initialize the AbstractLayer base class
      AbstractLayer.__init__(self)

      # NOTE(review): nothing verifies that sum(outputSizes) == inputSize;
      # consider asserting this upstream -- TODO confirm intended contract.

      # One input port, plus one output port per requested split size
      self.input = InputPort(inputSize)
      self.outputPorts = [OutputPort(size) for size in outputSizes]
# Example #13
# 0
    def __init__(self, size, initialHistory=None):
        """
        Create a History layer.

        size           -- number of units held at each timestep
        initialHistory -- gpu array used as the initial output value;
                          defaults to an empty (0, 0) gpu array
        """

        AbstractLayer.__init__(self)
        self.layerSize = size

        self.input = InputPort(self.layerSize)
        self.output = OutputPort(self.layerSize)

        # Stored activations, one entry per timestep
        self.history = []

        # Avoid the shared-mutable-default pitfall: the old gpu.zeros((0, 0))
        # default was allocated once at definition time and then aliased by
        # self.initialHistory on every instance.
        if initialHistory is None:
            initialHistory = gpu.zeros((0, 0))

        # Round-trip through numpy to get an independent gpu copy, so later
        # writes through the output port cannot corrupt the initial history
        self.output.value = gpu.garray(np.copy(
            initialHistory.as_numpy_array()))
        self.initialHistory = initialHistory
   def __init__(self, layerSize, initialHistory):
      """
      A delay layer can be connected to several inputs
      """

      # Initialize the AbstractLayer base class
      AbstractLayer.__init__(self)

      # One input port and one output port, each layerSize units wide
      self.input, self.output = InputPort(layerSize), OutputPort(layerSize)

      # The stored history is propagated toward the output each time step
      # is called; it starts as a single zero row.
      self.initial_history = initialHistory
      self.history = np.zeros((1, layerSize))
      self.current_step = 0
    def __init__(self, layerSize, initialHistory):
        """
        A delay layer can be connected to several inputs
        """

        # Initialize the AbstractLayer base class
        AbstractLayer.__init__(self)

        # One input port and one output port, each layerSize units wide
        self.input, self.output = InputPort(layerSize), OutputPort(layerSize)

        # The stored history is propagated toward the output each time step
        # is called; it starts as a single zero row.
        self.initial_history = initialHistory
        self.history = np.zeros((1, layerSize))
        self.current_step = 0