Ejemplo n.º 1
0
    def __setattr__(self, k, v):
        """Attribute-set hook with two special behaviors.

        1. Renaming: setting ``name`` on a layer that already has one is only
           allowed while the layer is alone in its network; the layer is then
           moved into a fresh network of its own.
        2. Decorating: while ``self._decorating`` is truthy, assignments are
           redirected into the existing theano shared variable (via
           ``set_value``) instead of rebinding the attribute, so the symbolic
           graph keeps pointing at the same storage.
        """
        if k == "name" and hasattr(self, k):
            if len(self.network.layers) > 1:
                raise ValueError(
                    "You can't change the name of a connected layer")
            else:
                # Rename allowed: rebind the name, then re-home the layer in a
                # brand-new single-layer network.
                # NOTE(review): this branch does not return, so execution falls
                # through to the generic path below and the name is set a
                # second time — redundant but apparently harmless; confirm.
                object.__setattr__(self, k, v)
                self.network = MNET.Network()
                self.network._addLayer(self)

        # During early construction ``_decorating`` may not exist yet; in that
        # case just set the attribute normally (EAFP).
        try:
            deco = self._decorating
        except AttributeError:
            object.__setattr__(self, k, v)
            return

        if deco:
            var = getattr(self, k)
            try:
                # Presumably ``var`` is a theano shared variable: update its
                # storage in place rather than rebinding the attribute.
                # ``borrow=True`` lets theano avoid copying the array.
                var.set_value(numpy.asarray(v, dtype=theano.config.floatX),
                              borrow=True)
                return
            except AttributeError:
                # Not a shared variable (no ``set_value``): fall back to a
                # plain attribute assignment below.
                pass

        object.__setattr__(self, k, v)
Ejemplo n.º 2
0
    def __init__(self, size, name=None, **kwargs):
        """Input layer: registers itself with a fresh network and exposes a
        symbolic matrix as its inputs."""
        Layer_ABC.__init__(self, size, name=name, **kwargs)

        # bookkeeping
        self.kwargs = kwargs
        self.type = TYPE_INPUT_LAYER
        self.nbInputs = size

        # every input layer starts life in its own private network
        net = MNET.Network()
        self.network = net
        net.addInput(self)

        # symbolic placeholder for the incoming data
        self.inputs = tt.matrix(name=self.name)
Ejemplo n.º 3
0
    def _resetNetwork(self, fullReset=True, newNetwork=None):
        """Re-home this layer in a network, optionally clearing its init status.

        With ``newNetwork`` given, simply attach to it; otherwise create a
        fresh single-layer network for this layer alone.
        """
        if fullReset:
            self._initStatus = 0

        if newNetwork is not None:
            self.network = newNetwork
            return

        # no network supplied: spin up a private one containing just this layer
        fresh = MNET.Network()
        self.network = fresh
        fresh._addLayer(self)
Ejemplo n.º 4
0
    def __init__(self,
                 size,
                 layerTypes,
                 activation=MA.Pass(),
                 regularizations=None,
                 initializations=None,
                 learningScenario=None,
                 decorators=None,
                 name=None):
        """Abstract layer constructor.

        :param size: number of output units of this layer.
        :param layerTypes: role tag(s) of the layer.
        :param activation: activation object applied to the outputs.
            NOTE(review): the default ``MA.Pass()`` instance is created once
            at definition time and shared by every layer built with the
            default — fine only if ``Pass`` is stateless; confirm.
        :param regularizations: list of regularization objects (default: none).
        :param initializations: list of initialization objects (default: none).
        :param learningScenario: scenario driving parameter updates.
        :param decorators: list of decorator objects (default: none).
        :param name: layer name; autogenerated from the class name and a
            unique random tag when ``None``.
        """
        # Fix: the original signature used mutable default arguments ([]) for
        # regularizations/initializations/decorators, so every layer built
        # with the defaults shared (and could accidentally mutate) the very
        # same three lists. Use None sentinels instead.
        if regularizations is None:
            regularizations = []
        if initializations is None:
            initializations = []
        if decorators is None:
            decorators = []

        self.isLayer = True

        # a unique tag associated to the layer
        self.appelido = numpy.random.random()

        if name is not None:
            self.name = name
        else:
            self.name = "%s_%s" % (self.__class__.__name__, self.appelido)

        self.types = layerTypes

        self.nbInputs = None
        self.inputs = None
        self.nbOutputs = size
        self.outputs = None  # this is a symbolic var
        self.testOutputs = None  # this is a symbolic var

        self.preactivation_outputs = None
        self.preactivation_testOutputs = None

        self.activation = activation
        self.regularizationObjects = regularizations
        self.regularizations = []  # computed regularization terms accumulate here
        self.decorators = decorators
        self.initializations = initializations
        self.learningScenario = learningScenario

        # every layer starts alone in its own network; networks get merged
        # when layers are connected
        self.network = MNET.Network()
        self.network._addLayer(self)

        # layers that have registered their inputs with this one
        self._inputRegistrations = set()

        self._mustInit = True
        self._mustReset = True
        self._decorating = False

        self.parameters = {}
Ejemplo n.º 5
0
    def __init__(self, size, nbDimentions, dictSize, name=None, **kwargs):
        """Word-embedding input layer.

        :param size int: the size of the input vector (if your input is a
            sentence this should be the number of words in it).
        :param nbDimentions int: the number of dimentions in wich to encode
            each word.
        :param dictSize int: the total number of words.
        """
        # NOTE(review): nbDimentions is forwarded positionally to
        # Layer_ABC.__init__ — verify it matches that signature's second
        # positional parameter.
        Layer_ABC.__init__(self, size, nbDimentions, name=name, **kwargs)

        # input layers live in their own network until connected
        net = MNET.Network()
        self.network = net
        net.addInput(self)

        self.type = TYPE_INPUT_LAYER
        self.dictSize = dictSize
        self.nbDimentions = nbDimentions

        # one embedding vector per input word, concatenated on output
        self.nbInputs = size
        self.nbOutputs = self.nbDimentions * self.nbInputs

        # embedding matrix, randomly initialised in [0, 1)
        shape = (self.dictSize, self.nbDimentions)
        weights = numpy.asarray(numpy.random.random(shape),
                                dtype=theano.config.floatX)
        self.embeddings = theano.shared(weights)

        # word indices arrive as integer matrices
        self.inputs = tt.imatrix()
        self.test_inputs = tt.imatrix()