Example #1
    def __init__(self, dim, peepholes=False, name=None):
        """
        :arg dim: number of cells
        :key peepholes: enable peephole connections (from state to gates)? """
        self.setArgs(dim=dim, peepholes=peepholes)

        # Internal buffers, created dynamically:
        self.bufferlist = [
            ('ingate', dim),
            ('outgate', dim),
            ('forgetgate', dim),
            ('ingatex', dim),
            ('outgatex', dim),
            ('forgetgatex', dim),
            ('state', dim),
            ('ingateError', dim),
            ('outgateError', dim),
            ('forgetgateError', dim),
            ('stateError', dim),
        ]

        Module.__init__(self, 4 * dim, dim, name)
        if self.peepholes:
            ParameterContainer.__init__(self, dim * 3)
            self._setParameters(self.params)
            self._setDerivatives(self.derivs)
Example #2
 def __init__(self, dim, sigma=0.):
     Explorer.__init__(self, dim, dim)
     self.dim = dim
     
     # initialize parameters to sigma
     ParameterContainer.__init__(self, dim, stdParams=0)
     self.sigma = [sigma] * dim
Example #3
    def __init__(self, dim, peepholes = False, name = None):
        """
        :arg dim: number of cells
        :key peepholes: enable peephole connections (from state to gates)? """
        self.setArgs(dim = dim, peepholes = peepholes)

        # Internal buffers, created dynamically:
        self.bufferlist = [
            ('ingate', dim),
            ('outgate', dim),
            ('forgetgate', dim),
            ('ingatex', dim),
            ('outgatex', dim),
            ('forgetgatex', dim),
            ('state', dim),
            ('ingateError', dim),
            ('outgateError', dim),
            ('forgetgateError', dim),
            ('stateError', dim),
        ]

        Module.__init__(self, 4*dim, dim, name)
        if self.peepholes:
            ParameterContainer.__init__(self, dim*3)
            self._setParameters(self.params)
            self._setDerivatives(self.derivs)
Example #4
 def _setParameters(self, p, owner=None):
     ParameterContainer._setParameters(self, p, owner)
     nrNeurons = self.outdim
     indim = self.indim  # input dimension (set by Module.__init__)
     first, second = 0, 0
     first, second = second, second + indim * nrNeurons
     self.ingateConns = self.params[first:second]
     first, second = second, second + indim * nrNeurons
     self.forgetgateConns = self.params[first:second]
     first, second = second, second + indim * nrNeurons
     self.cellConns = self.params[first:second]
     first, second = second, second + indim * nrNeurons
     self.outgateConns = self.params[first:second]
     first, second = second, second + nrNeurons * nrNeurons
     self.ingateRecConns = self.params[first:second]
     first, second = second, second + nrNeurons * nrNeurons
     self.forgetgateRecConns = self.params[first:second]
     first, second = second, second + nrNeurons * nrNeurons
     self.cellRecConns = self.params[first:second]
     first, second = second, second + nrNeurons * nrNeurons
     self.outgateRecConns = self.params[first:second]
     if self.peep:
         first, second = second, second + nrNeurons
         self.ingatePeepWeights = self.params[first:second]
         first, second = second, second + nrNeurons
         self.forgetgatePeepWeights = self.params[first:second]
         first, second = second, second + nrNeurons
         self.outgatePeepWeights = self.params[first:second]
Example #5
 def _setDerivatives(self, d, owner=None):
     ParameterContainer._setDerivatives(self, d, owner)
     nrNeurons = self.outdim
     indim = self.indim  # input dimension (set by Module.__init__)
     first, second = 0, 0
     first, second = second, second + indim * nrNeurons
     self.ingateConnDerivs = self.derivs[first:second]
     first, second = second, second + indim * nrNeurons
     self.forgetgateConnDerivs = self.derivs[first:second]
     first, second = second, second + indim * nrNeurons
     self.cellConnDerivs = self.derivs[first:second]
     first, second = second, second + indim * nrNeurons
     self.outgateConnDerivs = self.derivs[first:second]
     first, second = second, second + nrNeurons * nrNeurons
     self.ingateRecConnDerivs = self.derivs[first:second]
     first, second = second, second + nrNeurons * nrNeurons
     self.forgetgateRecConnDerivs = self.derivs[first:second]
     first, second = second, second + nrNeurons * nrNeurons
     self.cellRecConnDerivs = self.derivs[first:second]
     first, second = second, second + nrNeurons * nrNeurons
     self.outgateRecConnDerivs = self.derivs[first:second]
     if self.peep:
         first, second = second, second + nrNeurons
         self.ingatePeepDerivs = self.derivs[first:second]
         first, second = second, second + nrNeurons
         self.forgetgatePeepDerivs = self.derivs[first:second]
         first, second = second, second + nrNeurons
         self.outgatePeepDerivs = self.derivs[first:second]
Example #6
 def _setDerivatives(self, d, owner = None):
     ParameterContainer._setDerivatives(self, d, owner)
     nrNeurons = self.outdim
     indim = self.indim  # input dimension (set by Module.__init__)
     first, second = 0, 0
     first, second = second, second + indim*nrNeurons
     self.ingateConnDerivs = self.derivs[first:second]
     first, second = second, second + indim*nrNeurons
     self.forgetgateConnDerivs = self.derivs[first:second]
     first, second = second, second + indim*nrNeurons
     self.cellConnDerivs = self.derivs[first:second]
     first, second = second, second + indim*nrNeurons
     self.outgateConnDerivs = self.derivs[first:second]
     first, second = second, second + nrNeurons*nrNeurons
     self.ingateRecConnDerivs = self.derivs[first:second]
     first, second = second, second + nrNeurons*nrNeurons
     self.forgetgateRecConnDerivs = self.derivs[first:second]
     first, second = second, second + nrNeurons*nrNeurons
     self.cellRecConnDerivs = self.derivs[first:second]
     first, second = second, second + nrNeurons*nrNeurons
     self.outgateRecConnDerivs = self.derivs[first:second]
     if self.peep:
         first, second = second, second + nrNeurons
         self.ingatePeepDerivs = self.derivs[first:second]
         first, second = second, second + nrNeurons
         self.forgetgatePeepDerivs = self.derivs[first:second]
         first, second = second, second + nrNeurons
         self.outgatePeepDerivs = self.derivs[first:second]
Example #7
 def __init__(self, dim, name=None):
     NeuronLayer.__init__(self, dim, name)
     # initialize sigmas to 0
     ParameterContainer.__init__(self, dim, stdParams = 0)
     # if autoalpha is set to True, alpha_sigma = alpha_mu = alpha*sigma^2
     self.autoalpha = False
     self.enabled = True
Example #8
    def __init__(self, dim, sigma=0.0):
        Explorer.__init__(self, dim, dim)
        self.dim = dim

        # initialize parameters to sigma
        ParameterContainer.__init__(self, dim, stdParams=0)
        self.sigma = [sigma] * dim
Example #9
 def _setParameters(self, p, owner = None):
     ParameterContainer._setParameters(self, p, owner)
     nrNeurons = self.outdim
     indim = self.indim  # input dimension (set by Module.__init__)
     first, second = 0, 0
     first, second = second, second + indim*nrNeurons
     self.ingateConns = self.params[first:second]
     first, second = second, second + indim*nrNeurons
     self.forgetgateConns = self.params[first:second]
     first, second = second, second + indim*nrNeurons
     self.cellConns = self.params[first:second]
     first, second = second, second + indim*nrNeurons
     self.outgateConns = self.params[first:second]
     first, second = second, second + nrNeurons*nrNeurons
     self.ingateRecConns = self.params[first:second]
     first, second = second, second + nrNeurons*nrNeurons
     self.forgetgateRecConns = self.params[first:second]
     first, second = second, second + nrNeurons*nrNeurons
     self.cellRecConns = self.params[first:second]
     first, second = second, second + nrNeurons*nrNeurons
     self.outgateRecConns = self.params[first:second]
     if self.peep:
         first, second = second, second + nrNeurons
         self.ingatePeepWeights = self.params[first:second]
         first, second = second, second + nrNeurons
         self.forgetgatePeepWeights = self.params[first:second]
         first, second = second, second + nrNeurons
         self.outgatePeepWeights = self.params[first:second]
Example #10
 def _setDerivatives(self, d, owner=None):
     """ put slices of this array back into the modules """
     ParameterContainer._setDerivatives(self, d, owner)
     index = 0
     for x in self._containerIterator():
         x._setDerivatives(self.derivs[index:index + x.paramdim], self)
         index += x.paramdim
Example #11
 def _setParameters(self, p, owner=None):
     ParameterContainer._setParameters(self, p, owner)
     size = self.dim
     self.ingatePeepWeights = self.params[:size]
     self.forgetgatePeepWeights = self.params[size:size *
                                              (1 + self.dimensions)]
     self.outgatePeepWeights = self.params[size * (1 + self.dimensions):]
Example #12
 def __init__(self, numStates, numActions, env, name=None):
     Module.__init__(self, 1, 1, name)
     ParameterContainer.__init__(self, numStates * numActions)
     self.numRows = numStates
     self.numColumns = numActions
     #self.allowed_actions = range(numActions)
     self.env = env
Example #13
 def _setDerivatives(self, d, owner=None):
     """ put slices of this array back into the modules """
     ParameterContainer._setDerivatives(self, d, owner)
     index = 0
     for x in self._containerIterator():
         x._setDerivatives(self.derivs[index:index + x.paramdim], self)
         index += x.paramdim
Example #14
 def __init__(self, dim, name=None):
     NeuronLayer.__init__(self, dim, name)
     # initialize sigmas to 0
     ParameterContainer.__init__(self, dim, stdParams=0)
     # if autoalpha is set to True, alpha_sigma = alpha_mu = alpha*sigma^2
     self.autoalpha = False
     self.enabled = True
Example #15
 def _setInitEvaluable(self, evaluable):
     if evaluable is None:
         # if there is no initial point specified, we start at one that's sampled 
         # normally around the origin.
         if self.numParameters is not None:
             evaluable = randn(self.numParameters)
         else:
             raise ValueError('Could not determine the dimensionality of the evaluator. '+\
                              'Please provide an initial search point.')   
     if isinstance(evaluable, list):
         evaluable = array(evaluable)
     
     # If the evaluable is provided as a list of numbers or as an array,
     # we wrap it into a ParameterContainer.
     if isinstance(evaluable, ndarray):            
         pc = ParameterContainer(len(evaluable))
         pc._setParameters(evaluable)
         self._wasWrapped = True
         evaluable = pc
     self._initEvaluable = evaluable
     if isinstance(self._initEvaluable, ParameterContainer):
         if self.numParameters is None:            
             self.numParameters = len(self._initEvaluable)
         elif self.numParameters != len(self._initEvaluable):
             raise ValueError("Parameter dimension mismatch: evaluator expects "+str(self.numParameters)\
                              +" but the evaluable has "+str(len(self._initEvaluable))+".")
Example #16
    def _setInitEvaluable(self, evaluable):
        if evaluable is None:
            # if there is no initial point specified, we start at one that's sampled
            # normally around the origin.
            if self.numParameters is not None:
                evaluable = randn(self.numParameters)
            else:
                raise ValueError('Could not determine the dimensionality of the evaluator. '+\
                                 'Please provide an initial search point.')
        if isinstance(evaluable, list):
            evaluable = array(evaluable)

        # If the evaluable is provided as a list of numbers or as an array,
        # we wrap it into a ParameterContainer.
        if isinstance(evaluable, ndarray):
            pc = ParameterContainer(len(evaluable))
            pc._setParameters(evaluable)
            self._wasWrapped = True
            evaluable = pc
        self._initEvaluable = evaluable
        if isinstance(self._initEvaluable, ParameterContainer):
            if self.numParameters is None:
                self.numParameters = len(self._initEvaluable)
            elif self.numParameters != len(self._initEvaluable):
                raise ValueError("Parameter dimension mismatch: evaluator expects "+str(self.numParameters)\
                                 +" but the evaluable has "+str(len(self._initEvaluable))+".")
Example #17
 def __init__(self, inmod, outmod, name=None,
              inSliceFrom=0, inSliceTo=None, outSliceFrom=0, outSliceTo=None):
     if outSliceTo is None:
         outSliceTo = outmod.indim
     size = outSliceTo - outSliceFrom
     Connection.__init__(self, inmod, outmod, name,
                         inSliceFrom, inSliceTo, outSliceFrom, outSliceTo)
     ParameterContainer.__init__(self, size)
Example #18
 def __init__(self, inmod, outmod, name=None,
              inSliceFrom=0, inSliceTo=None, outSliceFrom=0, outSliceTo=None):
     if inSliceTo is None:
         inSliceTo = inmod.outdim
     size = inSliceTo - inSliceFrom
     Connection.__init__(self, inmod, outmod, name,
                         inSliceFrom, inSliceTo, outSliceFrom, outSliceTo)
     ParameterContainer.__init__(self, size)
Example #19
 def _setDerivatives(self, d, owner=None):
     ParameterContainer._setDerivatives(self, d, owner)
     size = self.dim
     self.ingatePeepDerivs = self.derivs[:size]
     self.forgetgatePeepDerivs = \
         self.derivs[size:size * (1 + self.dimensions)]
     self.outgatePeepDerivs = \
         self.derivs[size * (1 + self.dimensions):]        
Example #20
 def _setDerivatives(self, d, owner=None):
     ParameterContainer._setDerivatives(self, d, owner)
     size = self.dim
     self.ingatePeepDerivs = self.derivs[:size]
     self.forgetgatePeepDerivs = \
         self.derivs[size:size * (1 + self.dimensions)]
     self.outgatePeepDerivs = \
         self.derivs[size * (1 + self.dimensions):]
Example #21
    def __init__(self, numRows, numColumns, name=None):
        """ initialize with the number of rows and columns. the table
            values are all set to zero.
        """
        Module.__init__(self, 2, 1, name)
        ParameterContainer.__init__(self, numRows*numColumns)

        self.numRows = numRows
        self.numColumns = numColumns
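Because the table module above keeps its numRows*numColumns values in a single ParameterContainer array, the flat parameter vector can be viewed as a two-dimensional table. A short usage sketch (the class name Table is assumed here purely for illustration):

t = Table(3, 4)                  # 12 parameters managed by ParameterContainer
values = t.params.reshape(3, 4)  # 2-D view: numRows x numColumns
values[1, 2] = 0.5               # writes through to the flat t.params array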
Example #22
    def __init__(self, numRows, numColumns, name=None):
        """ initialize with the number of rows and columns. the table
            values are all set to zero.
        """
        Module.__init__(self, 2, 1, name)
        ParameterContainer.__init__(self, numRows*numColumns)

        self.numRows = numRows
        self.numColumns = numColumns
Example #23
 def __init__(self, inmod, outmod, name=None,
              inSliceFrom=0, inSliceTo=None, outSliceFrom=0, outSliceTo=None):
     if inSliceTo is None:
         inSliceTo = inmod.outdim
     size = inSliceTo - inSliceFrom
     # FIXME: call `super()` with named kwargs so that cooperative inheritance will work, otherwise...
     # >>> isinstance(LinearConnection, Connection)
     # False
     # >>> isinstance(LinearConnection, ParameterContainer)
     # False
     Connection.__init__(self, inmod, outmod, name,
                         inSliceFrom, inSliceTo, outSliceFrom, outSliceTo)
     ParameterContainer.__init__(self, size)
Example #24
    def __init__(self, actionnum, T, theta, **args):
        self.feadim = len(theta)
        Module.__init__(self, self.feadim * actionnum, 1, **args)
        ParameterContainer.__init__(self, self.feadim)
        self.T = T
        self.g = None
        self.bf = None

        # feadim x 1 vector.
        self.theta = theta
        self.actionnum = actionnum

        self.cachedActionProb = None
Example #25
    def __init__ (self, indim = 27, outdim = 6, seed = None, channels_setup = None, steps = None, types_subset = None):
        self._seed = seed or []
        self._steps = steps or 10
        self._channels_setup = channels_setup

        self._types_subset = types_subset or []
        if isinstance(self._seed, str):
            self.parse_seed(self._seed)
        if not self._seed:
            self.generate_seed()

        #print "Init module ", self.get_num_channels()
        Module.__init__(self, indim, outdim, None)
        ParameterContainer.__init__(self, indim)
Example #26
 def __init__(self, statedim, actiondim, sigma= -2.):
     Explorer.__init__(self, actiondim, actiondim)
     self.statedim = statedim
     self.actiondim = actiondim
     
     # initialize parameters to sigma
     ParameterContainer.__init__(self, actiondim, stdParams=0)
     self.sigma = [sigma] * actiondim
     
     # exploration matrix (linear function)
     self.explmatrix = random.normal(0., expln(self.sigma), (statedim, actiondim))
     
     # store last state
     self.state = None
Example #27
    def __init__(self, statedim, actiondim, sigma= -2.):
        Explorer.__init__(self, actiondim, actiondim)
        self.statedim = statedim
        self.actiondim = actiondim

        # initialize parameters to sigma
        ParameterContainer.__init__(self, actiondim, stdParams=0)
        self.sigma = [sigma] * actiondim

        # exploration matrix (linear function)
        self.explmatrix = random.normal(0., expln(self.sigma), (statedim, actiondim))

        # store last state
        self.state = None
Example #28
    def __init__(self, dx, dy, *args, **kwargs):
        Connection.__init__(self, *args, **kwargs)
        self.dx = dx
        self.dy = dy
        ParameterContainer.__init__(self, 4 * self.outdim)

        for i in xrange(0, self.outdim):
            self.params[2 + i * 4] = dx / 6.0
            self.params[3 + i * 4] = dx / 4.0

        self.xx = numpy.repeat([numpy.arange(0, self.dx, 1)], self.dy, axis=0).T
        self.yy = numpy.repeat([numpy.arange(0, self.dy, 1)], self.dx, axis=0)
        assert self.indim == self.dx * self.dy, "Indim (%i) does not equal dx * dy (%i %i)" % (
            self.indim,
            self.dx,
            self.dy,
        )
Example #29
    def sortModules(self):
        """Prepare the network for activation by sorting the internal
        datastructure.

        Needs to be called before activation."""
        if self.sorted:
            return
        # Sort the modules.
        self._topologicalSort()
        # Sort the connections by name.
        for m in self.modules:
            self.connections[m].sort(key=lambda x: x.name)
        self.motherconnections.sort(key=lambda x: x.name)

        # Create a single array with all parameters.
        tmp = [pc.params for pc in self._containerIterator()]
        total_size = sum(scipy.size(i) for i in tmp)
        ParameterContainer.__init__(self, total_size)
        if total_size > 0:
            self.params[:] = scipy.concatenate(tmp)
            self._setParameters(self.params)

            # Create a single array with all derivatives.
            tmp = [pc.derivs for pc in self._containerIterator()]
            self.resetDerivatives()
            self.derivs[:] = scipy.concatenate(tmp)
            self._setDerivatives(self.derivs)

        # TODO: make this a property; indim and outdim are invalid before
        # .sortModules is called!
        # Determine the input and output dimensions of the network.
        self.indim = sum(m.indim for m in self.inmodules)
        self.outdim = sum(m.outdim for m in self.outmodules)

        # Initialize the network buffers.
        self.bufferlist = []
        Module.__init__(self, self.indim, self.outdim, name=self.name)
        self.sorted = True
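sortModules() is what turns the whole network into one big ParameterContainer: it concatenates the parameter arrays of all sub-containers into a single flat array and then hands slices of it back to them via _setParameters/_setDerivatives. A hedged usage sketch with PyBrain's buildNetwork shortcut, which calls sortModules() internally:

from pybrain.tools.shortcuts import buildNetwork

net = buildNetwork(3, 5, 2)  # simple feed-forward net; already sorted
print(net.paramdim)          # total number of trainable parameters
net.params[:] = 0.0          # writes through to every connection's weights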
Example #30
 def __init__(self, name=None, **args):
     ParameterContainer.__init__(self, **args)
     self.name = name
     # Due to the necessity of regular testing for membership, modules are
     # stored in a set.
     self.modules = set()
     self.modulesSorted = []
     # The connections are stored in a dictionary: the key is the module
     # where the connection leaves from, the value is a list of the
     # corresponding connections.
     self.connections = {}
     self.inmodules = []
     self.outmodules = []
     # Special treatment of weight-shared connections.
     self.motherconnections = []
     # This flag is used to make sure that the modules are reordered when
     # new connections are added.
     self.sorted = False
Example #31
    def __init__(self, dim, module, name=None, onesigma=True):
        NeuronLayer.__init__(self, dim, name)
        self.exploration = zeros(dim, float)
        self.state = None
        self.onesigma = onesigma

        if self.onesigma:
            # one single parameter: sigma
            ParameterContainer.__init__(self, 1)
        else:
            # sigmas for all parameters in the exploration module
            ParameterContainer.__init__(self, module.paramdim)

        # a module for the exploration
        assert module.outdim == dim, "Passed module does not have right dimension"
        self.module = module
        self.autoalpha = False
        self.enabled = True
Example #32
    def __init__(self, indim, outdim, peepholes=False, name=None):
        nrNeurons = outdim
        self.peep = peepholes
        # internal buffers:
        self.ingate = zeros((0, nrNeurons))
        self.outgate = zeros((0, nrNeurons))
        self.forgetgate = zeros((0, nrNeurons))
        self.cell = zeros((0, nrNeurons))
        self.ingatex = zeros((0, nrNeurons))
        self.outgatex = zeros((0, nrNeurons))
        self.forgetgatex = zeros((0, nrNeurons))
        self.cellx = zeros((0, nrNeurons))
        self.state = zeros((0, nrNeurons))
        self.ingateError = zeros((0, nrNeurons))
        self.outgateError = zeros((0, nrNeurons))
        self.forgetgateError = zeros((0, nrNeurons))
        self.stateError = zeros((0, nrNeurons))
        self.Sin = zeros((0, indim * nrNeurons))
        self.Sforget = zeros((0, indim * nrNeurons))
        self.Scell = zeros((0, indim * nrNeurons))
        self.SinRec = zeros((0, nrNeurons * nrNeurons))
        self.SforgetRec = zeros((0, nrNeurons * nrNeurons))
        self.ScellRec = zeros((0, nrNeurons * nrNeurons))

        Module.__init__(self, indim, outdim, name)
        if self.peep:
            ParameterContainer.__init__(
                self, nrNeurons * 3 + (4 * indim + nrNeurons) * nrNeurons)
            self.Sin_peep = zeros((0, nrNeurons))
            self.Sforget_peep = zeros((0, nrNeurons))
            self.Scell_peep = zeros((0, nrNeurons))
        else:
            ParameterContainer.__init__(self,
                                        (4 * indim + nrNeurons) * nrNeurons)
        self._setParameters(self.params)
        self._setDerivatives(self.derivs)

        # transfer functions and their derivatives
        self.f = sigmoid
        self.fprime = sigmoidPrime
        self.g = lambda x: 2 * tanh(x)
        self.gprime = lambda x: 2 * tanhPrime(x)
        self.h = self.g
        self.hprime = self.gprime
Example #33
 def __init__(self, dim, module, name=None, onesigma=True):
     NeuronLayer.__init__(self, dim, name)
     self.exploration = zeros(dim, float)
     self.state = None
     self.onesigma = onesigma
     
     if self.onesigma:
         # one single parameter: sigma
         ParameterContainer.__init__(self, 1)
     else:
         # sigmas for all parameters in the exploration module
         ParameterContainer.__init__(self, module.paramdim)
     
     # a module for the exploration
     assert module.outdim == dim, (
         "Passed module does not have right dimension")
     self.module = module
     self.autoalpha = False
     self.enabled = True
Example #34
 def __init__(self, indim, outdim, peepholes = False, name = None):
     nrNeurons = outdim
     self.peep = peepholes
     # internal buffers:
     self.ingate = zeros((0,nrNeurons))
     self.outgate = zeros((0,nrNeurons))
     self.forgetgate = zeros((0,nrNeurons))
     self.cell = zeros((0,nrNeurons))
     self.ingatex = zeros((0,nrNeurons))
     self.outgatex = zeros((0,nrNeurons))
     self.forgetgatex = zeros((0,nrNeurons))
     self.cellx = zeros((0,nrNeurons))
     self.state = zeros((0,nrNeurons))
     self.ingateError = zeros((0,nrNeurons))
     self.outgateError = zeros((0,nrNeurons))
     self.forgetgateError = zeros((0,nrNeurons))
     self.stateError = zeros((0,nrNeurons))
     self.Sin = zeros((0,indim*nrNeurons))
     self.Sforget = zeros((0,indim*nrNeurons))
     self.Scell = zeros((0,indim*nrNeurons))
     self.SinRec = zeros((0,nrNeurons*nrNeurons))
     self.SforgetRec = zeros((0,nrNeurons*nrNeurons))
     self.ScellRec = zeros((0,nrNeurons*nrNeurons))
     
     Module.__init__(self, indim, outdim, name)
     if self.peep:
         ParameterContainer.__init__(self, nrNeurons*3 + (4*indim+nrNeurons)*nrNeurons)
         self.Sin_peep = zeros((0,nrNeurons))
         self.Sforget_peep = zeros((0,nrNeurons))
         self.Scell_peep = zeros((0,nrNeurons))
     else:
         ParameterContainer.__init__(self, (4*indim+nrNeurons)*nrNeurons)
     self._setParameters(self.params)
     self._setDerivatives(self.derivs)
         
     # transfer functions and their derivatives
     self.f = sigmoid
     self.fprime = sigmoidPrime
     self.g = lambda x: 2*tanh(x)
     self.gprime = lambda x: 2*tanhPrime(x)
     self.h = self.g
     self.hprime = self.gprime
Example #35
 def __init__(self, dim, peepholes = False, name = None):
     self.setArgs(dim = dim, peepholes = peepholes)
     
     # Internal buffers:
     self.bufferlist = [
         ('ingate', dim),
         ('outgate', dim),
         ('forgetgate', dim),
         ('ingatex', dim),
         ('outgatex', dim),
         ('forgetgatex', dim),
         ('state', dim),
         ('ingateError', dim),
         ('outgateError', dim),
         ('forgetgateError', dim),
         ('stateError', dim),
     ]
     
     Module.__init__(self, 4*dim, dim, name)
     if self.peepholes:
         ParameterContainer.__init__(self, dim*3)
         self._setParameters(self.params)
         self._setDerivatives(self.derivs)
Example #36
    def __init__(self, dim, peepholes=False, name=None):
        self.setArgs(dim=dim, peepholes=peepholes)

        # Internal buffers:
        self.bufferlist = [
            ('ingate', dim),
            ('outgate', dim),
            ('forgetgate', dim),
            ('ingatex', dim),
            ('outgatex', dim),
            ('forgetgatex', dim),
            ('state', dim),
            ('ingateError', dim),
            ('outgateError', dim),
            ('forgetgateError', dim),
            ('stateError', dim),
        ]

        Module.__init__(self, 4 * dim, dim, name)
        if self.peepholes:
            ParameterContainer.__init__(self, dim * 3)
            self._setParameters(self.params)
            self._setDerivatives(self.derivs)
Example #37
    def __init__(self, dim, dimensions=1, peepholes=False, name=None):
        self.setArgs(dim=dim, peepholes=peepholes, dimensions=dimensions)

        # Internal buffers:
        self.bufferlist = [
            ("ingate", dim),
            ("outgate", dim),
            ("forgetgate", dim * dimensions),
            ("ingatex", dim),
            ("outgatex", dim),
            ("forgetgatex", dim * dimensions),
            ("state", dim),
            ("ingateError", dim),
            ("outgateError", dim),
            ("forgetgateError", dim * dimensions),
            ("stateError", dim),
        ]

        Module.__init__(self, (3 + 2 * dimensions) * dim, dim * 2, name)

        if self.peepholes:
            ParameterContainer.__init__(self, dim * (2 + dimensions))
            self._setParameters(self.params)
            self._setDerivatives(self.derivs)
Example #38
 def __init__(self, numStates, numActions, name=None):
     Module.__init__(self, 1, 1, name)
     ParameterContainer.__init__(self, numStates * numActions)
     self.numRows = numStates
     self.numColumns = numActions
Example #39
 def _setParameters(self, p, owner = None):
     ParameterContainer._setParameters(self, p, owner)
     dim = self.outdim
     self.ingatePeepWeights = self.params[:dim]
     self.forgetgatePeepWeights = self.params[dim:dim*2]
     self.outgatePeepWeights = self.params[dim*2:]
Example #40
 def _setDerivatives(self, d, owner = None):
     ParameterContainer._setDerivatives(self, d, owner)
     dim = self.outdim
     self.ingatePeepDerivs = self.derivs[:dim]
     self.forgetgatePeepDerivs = self.derivs[dim:dim*2]
     self.outgatePeepDerivs = self.derivs[dim*2:]
Example #41
    def __init__(self,
                 timedim,
                 shape,
                 hiddendim,
                 outsize,
                 blockshape=None,
                 name=None):
        """Initialize an MdrnnLayer.

        The dimensionality of the sequence - for example 2 for a
        picture or 3 for a video - is given by `timedim`, while the sidelengths
        along each dimension are given by the tuple `shape`.

        The layer will have `hiddendim` hidden units per swiping direction. The
        number of swiping directions is given by 2**timedim, which corresponds
        to one swipe from each corner to its opposing corner and back.

        To indicate how many outputs per timestep are used, you have to
        specify `outsize`.

        In order to treat blocks of the input and not single voxels, you can
        also specify `blockshape`. For example the layer will then feed (2, 2)
        chunks into the network at each timestep which correspond to the (2, 2)
        rectangles that the input can be split into.
        """
        self.timedim = timedim
        self.shape = shape
        blockshape = tuple([1] * timedim) if blockshape is None else blockshape
        self.blockshape = blockshape
        self.hiddendim = hiddendim
        self.outsize = outsize
        self.indim = reduce(operator.mul, shape, 1)
        self.blocksize = reduce(operator.mul, blockshape, 1)
        self.sequenceLength = self.indim / self.blocksize
        self.outdim = self.sequenceLength * self.outsize

        self.bufferlist = [('cellStates', self.sequenceLength * self.hiddendim)
                           ]

        Module.__init__(self, self.indim, self.outdim, name=name)

        # Amount of parameters that are required for the input to the hidden
        self.num_in_params = self.blocksize * self.hiddendim * (3 +
                                                                self.timedim)

        # Amount of parameters that are needed for the recurrent connections.
        # There is one set of these parameters for every time dimension.
        self.num_rec_params = outsize * hiddendim * (3 + self.timedim)

        # Amount of parameters that are needed for the output.
        self.num_out_params = outsize * hiddendim

        # Amount of parameters that are needed from the bias to the hidden and
        # the output
        self.num_bias_params = (3 +
                                self.timedim) * self.hiddendim + self.outsize

        # Total number of parameters.
        self.num_params = sum(
            (self.num_in_params, self.timedim * self.num_rec_params,
             self.num_out_params, self.num_bias_params))

        ParameterContainer.__init__(self, self.num_params)

        # Some layers for internal use.
        self.hiddenlayer = MDLSTMLayer(self.hiddendim, self.timedim)

        # Every point in the sequence has timedim predecessors.
        self.predlayers = [LinearLayer(self.outsize) for _ in range(timedim)]

        # We need a single layer to hold the input. We will swipe a connection
        # over the correct part of it, in order to feed the correct input in.
        self.inlayer = LinearLayer(self.indim)
        # Make some layers the same to save memory.
        self.inlayer.inputbuffer = self.inlayer.outputbuffer = self.inputbuffer

        # In order to allocate not too much memory, we just set the size of the
        # layer to 1 and correct it afterwards.
        self.outlayer = LinearLayer(self.outdim)
        self.outlayer.inputbuffer = self.outlayer.outputbuffer = self.outputbuffer

        self.bias = BiasUnit()
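A usage sketch for the constructor above; the dimensions follow directly from the code (indim is the product of `shape`, the sequence length is indim divided by the block size, and outdim is sequenceLength * outsize). It assumes the imports used inside the class (MDLSTMLayer, LinearLayer, BiasUnit, ...) are available:

# 2-D swipe over an 8x8 input, fed in 2x2 blocks, with 5 hidden units per
# swiping direction and 3 outputs per block-timestep
layer = MdrnnLayer(timedim=2, shape=(8, 8), hiddendim=5, outsize=3,
                   blockshape=(2, 2))
# indim = 64, sequenceLength = 64 / 4 = 16, outdim = 16 * 3 = 48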
Example #42
 def __init__(self, numStates, numActions, name=None):
     Module.__init__(self, 1, 1, name)
     ParameterContainer.__init__(self, numStates * numActions)
     self.numRows = numStates
     self.numColumns = numActions
Example #43
 def mutate(self, *args, **kwargs):
     ParameterContainer.mutate(self, *args, **kwargs)
     self.__stored._params[:] = self._params
Example #44
    def __init__(self, timedim, shape,
                 hiddendim, outsize, blockshape=None, name=None):
        """Initialize an MdrnnLayer.

        The dimensionality of the sequence - for example 2 for a
        picture or 3 for a video - is given by `timedim`, while the sidelengths
        along each dimension are given by the tuple `shape`.

        The layer will have `hiddendim` hidden units per swiping direction. The
        number of swiping directions is given by 2**timedim, which corresponds
        to one swipe from each corner to its opposing corner and back.

        To indicate how many outputs per timestep are used, you have to
        specify `outsize`.

        In order to treat blocks of the input and not single voxels, you can
        also specify `blockshape`. For example the layer will then feed (2, 2)
        chunks into the network at each timestep which correspond to the (2, 2)
        rectangles that the input can be split into.
        """
        self.timedim = timedim
        self.shape = shape
        blockshape = tuple([1] * timedim) if blockshape is None else blockshape
        self.blockshape = blockshape
        self.hiddendim = hiddendim
        self.outsize = outsize
        self.indim = reduce(operator.mul, shape, 1)
        self.blocksize = reduce(operator.mul, blockshape, 1)
        self.sequenceLength = self.indim / self.blocksize
        self.outdim = self.sequenceLength * self.outsize

        self.bufferlist = [('cellStates', self.sequenceLength * self.hiddendim)]

        Module.__init__(self, self.indim, self.outdim, name=name)

        # Amount of parameters that are required for the input to the hidden
        self.num_in_params = self.blocksize * self.hiddendim * (3 + self.timedim)

        # Amount of parameters that are needed for the recurrent connections.
        # There is one set of these parameters for every time dimension.
        self.num_rec_params = outsize * hiddendim * (3 + self.timedim)

        # Amount of parameters that are needed for the output.
        self.num_out_params = outsize * hiddendim

        # Amount of parameters that are needed from the bias to the hidden and
        # the output
        self.num_bias_params = (3 + self.timedim) * self.hiddendim + self.outsize

        # Total number of parameters.
        self.num_params = sum((self.num_in_params,
                               self.timedim * self.num_rec_params,
                               self.num_out_params,
                               self.num_bias_params))

        ParameterContainer.__init__(self, self.num_params)

        # Some layers for internal use.
        self.hiddenlayer = MDLSTMLayer(self.hiddendim, self.timedim)

        # Every point in the sequence has timedim predecessors.
        self.predlayers = [LinearLayer(self.outsize) for _ in range(timedim)]

        # We need a single layer to hold the input. We will swipe a connection
        # over the correct part of it, in order to feed the correct input in.
        self.inlayer = LinearLayer(self.indim)
        # Make some layers the same to save memory.
        self.inlayer.inputbuffer = self.inlayer.outputbuffer = self.inputbuffer

        # In order to allocate not too much memory, we just set the size of the
        # layer to 1 and correct it afterwards.
        self.outlayer = LinearLayer(self.outdim)
        self.outlayer.inputbuffer = self.outlayer.outputbuffer = self.outputbuffer

        self.bias = BiasUnit()
Example #45
# ------------------------

# here's the default way of setting it up: provide a function and an initial point
f = TabletFunction(2)
x0 = [2.1, 4]
l = algo(f, x0)

# f can also be a simple lambda function
l = algo(lambda x: sum(x)**2, x0)

# in the case of continuous optimization, the initial point
# can be provided as a list (above), an array...
l = algo(f, array(x0))

# ... or a ParameterContainer
pcontainer = ParameterContainer(2)
pcontainer._setParameters(x0)
l = algo(f, pcontainer)

# the initial point can be omitted if:
# a) the problem dimension is specified manually
l = algo(f, numParameters = 2)

# b) the function is a FunctionEnvironment that specifies the problem dimension itself
l = algo(f)

# but if neither is the case, this raises an error:
try:
    l = algo(lambda x: sum(x)**2)
except ValueError as e:
    print('Error caught:', e)
Example #46
 def __init__(self, *args, **kwargs):
     Connection.__init__(self, *args, **kwargs)
     ParameterContainer.__init__(self, self.indim*self.outdim)
Example #47
 def _setParameters(self, p, owner=None):
     ParameterContainer._setParameters(self, p, owner)
     size = self.dim
     self.ingatePeepWeights = self.params[:size]
     self.forgetgatePeepWeights = self.params[size:size*(1 + self.dimensions)]
     self.outgatePeepWeights = self.params[size*(1 + self.dimensions):]
Example #48
 def __init__(self, *args, **kwargs):
     IdentityConnection.__init__(self, *args, **kwargs)
     ParameterContainer.__init__(self, self.indim)
Example #49
 def _setParameters(self, p, owner=None):
     ParameterContainer._setParameters(self, p, owner)
     dim = self.outdim
     self.ingatePeepWeights = self.params[:dim]
     self.forgetgatePeepWeights = self.params[dim:dim * 2]
     self.outgatePeepWeights = self.params[dim * 2:]
Example #50
 def _setDerivatives(self, d, owner=None):
     ParameterContainer._setDerivatives(self, d, owner)
     dim = self.outdim
     self.ingatePeepDerivs = self.derivs[:dim]
     self.forgetgatePeepDerivs = self.derivs[dim:dim * 2]
     self.outgatePeepDerivs = self.derivs[dim * 2:]
Example #51
 def randomize(self, *args, **kwargs):
     ParameterContainer.randomize(self, *args, **kwargs)
     self.__stored._params[:] = self._params
Example #52
 def __init__(self, nbparams, **args):
     assert nbparams > 0
     ParameterContainer.__init__(self, nbparams, **args)
     self.setArgs(nbparams = self.paramdim)
Example #53
 def __init__(self, *args, **kwargs):
     Connection.__init__(self, *args, **kwargs)
     ParameterContainer.__init__(self, self.indim * self.outdim)
Example #54
task.N = 10
# for the simple evolvable class defined below
evoEval = lambda e: e.x


# starting points
# ----------------------
xlist1 = [2.]
xlist2 = [0.2, 10]
xlist100 = list(range(12, 112))

xa1 = array(xlist1)
xa2 = array(xlist2)
xa100 = array(xlist100)

pc1 = ParameterContainer(1)
pc2 = ParameterContainer(2)
pc100 = ParameterContainer(100)
pc1._setParameters(xa1)
pc2._setParameters(xa2)
pc100._setParameters(xa100)

# for the task object, we need a module
nnet = buildNetwork(task.outdim, 2, task.indim)

# a minimalistic Evolvable subclass that is not (as usual) a ParameterContainer
class SimpleEvo(Evolvable):
    def __init__(self, x): self.x = x
    def mutate(self):      self.x += random() - 0.3
    def copy(self):        return SimpleEvo(self.x)
    def randomize(self):   self.x = 10 * random() - 2
Example #55
 def __init__(self, nbparams, **args):
     assert nbparams > 0
     ParameterContainer.__init__(self, nbparams, **args)
     self.setArgs(nbparams = self.paramdim)