Example 1
    def __init__(self, layerSize):
        """
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A ReLU layer has an input port and output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)
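
All of these constructors assume the same surrounding module: an AbstractLayer
base class plus InputPort and OutputPort helpers, none of which appear in the
excerpts. A minimal sketch of that scaffolding, purely an assumption made so
the snippets can run, might look like this:

    import numpy as np

    # Hypothetical stand-ins -- the real AbstractLayer, InputPort and
    # OutputPort live elsewhere in the source project and may differ.
    class AbstractLayer:
        def __init__(self):
            # Base-class bookkeeping (e.g. registering ports) would go here.
            pass

    class InputPort:
        def __init__(self, size):
            self.size = size
            self.value = np.zeros((1, size))  # activation entering the layer

    class OutputPort:
        def __init__(self, size):
            self.size = size
            self.value = np.zeros((1, size))  # activation leaving the layer
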
Example 2
    def __init__(self, layerSize):
        """
        An absolute value layer can be connected to several inputs
        """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # An absolute value layer has an input port and output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)
Example 3
    def __init__(self, size, initialHistory=np.zeros((0, 0))):
        """
        Create a History layer
        """

        AbstractLayer.__init__(self)
        self.layerSize = size

        self.input = InputPort(self.layerSize)
        self.output = OutputPort(self.layerSize)

        self.history = []

        self.output.value = np.copy(initialHistory)
        self.initialHistory = initialHistory
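
As a usage sketch (the class name HistoryLayer and the scaffolding above are
assumptions; the excerpt shows only the constructor), note that the output
port receives a copy of the seed history, so later mutations of the caller's
array do not leak into the layer:

    import numpy as np

    seed = np.array([[0.1, 0.2, 0.3]])
    layer = HistoryLayer(3, initialHistory=seed)  # hypothetical class name

    print(layer.output.value)   # [[0.1 0.2 0.3]]
    seed[0, 0] = 99.0
    print(layer.output.value)   # unchanged, thanks to np.copy in the constructor
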
Example 4
   def __init__(self, inputSize, outputSizes):
      """
      Create a layer which splits the input into the provided output sizes
      """

      # Properly inherit the AbstractLayer
      AbstractLayer.__init__(self)

      # Sanity check: the output sizes must add up to the input size
      assert sum(outputSizes) == inputSize

      # A split layer has an input port and several output ports
      self.input = InputPort(inputSize)
      self.outputPorts = []
      for size in outputSizes:
         self.outputPorts.append(OutputPort(size))
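
A short usage sketch (the class name SplitLayer and the slicing loop are
assumptions, since the excerpt defines only the ports):

    import numpy as np

    # Split a 5-unit input across output ports of size 2 and 3.
    layer = SplitLayer(5, [2, 3])  # hypothetical class name

    x = np.arange(5.0).reshape(1, 5)
    offset = 0
    for port in layer.outputPorts:
        port.value = x[:, offset:offset + port.size]
        offset += port.size

    print([p.value for p in layer.outputPorts])
    # [array([[0., 1.]]), array([[2., 3., 4.]])]
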
Example 5
    def __init__(self, size, initialHistory=gpu.zeros((0, 0))):
        """
      Create a History layer
      """

        AbstractLayer.__init__(self)
        self.layerSize = size

        self.input = InputPort(self.layerSize)
        self.output = OutputPort(self.layerSize)

        self.history = []

        self.output.value = gpu.garray(np.copy(
            initialHistory.as_numpy_array()))
        self.initialHistory = initialHistory
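
Example 5 is the GPU counterpart of Example 3, written against gnumpy
(conventionally imported as import gnumpy as gpu). The constructor round-trips
the seed through as_numpy_array() and np.copy before re-wrapping it with
gpu.garray, presumably because np.copy cannot be applied to a garray directly;
either way, the output port ends up with its own copy of the initial history
rather than a reference to the caller's array.
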
Example 6
    def __init__(self, layerSize, initialHistory):
        """
      A delay layer can be connected to several inputs
      """

        # Properly inherit the AbstractLayer
        AbstractLayer.__init__(self)

        # A delay layer has an input port and output port
        self.input = InputPort(layerSize)
        self.output = OutputPort(layerSize)

        # A delay layer has a history, which propagates forward
        # when step is called
        self.initial_history = initialHistory
        self.history = np.zeros((1, layerSize))
        self.current_step = 0
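
The comments above refer to a step method that moves the stored history
forward, but the excerpt does not include it. A minimal sketch of what such a
method might look like (an assumption, not the project's actual code) is:

    import numpy as np

    # Hypothetical method of the delay layer: emit the previous activation
    # and store the current input for the next call (a one-step delay).
    def step(self):
        self.output.value = np.copy(self.history)
        self.history = np.copy(self.input.value)
        self.current_step += 1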