Example 1
def _setParameters(self, p, owner=None):
    """ Install the flat parameter vector and expose the peephole weights as views into it. """
    ParameterContainer._setParameters(self, p, owner)
    size = self.dim
    # layout: dim ingate weights, then dim * dimensions forgetgate weights
    # (one set per dimension), then dim outgate weights
    self.ingatePeepWeights = self.params[:size]
    self.forgetgatePeepWeights = self.params[size:size * (1 + self.dimensions)]
    self.outgatePeepWeights = self.params[size * (1 + self.dimensions):]
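
The slicing above is easier to follow in isolation. The following is an illustrative sketch only (not library code), mirroring the same index arithmetic on a plain NumPy array with hypothetical sizes dim = 3 and dimensions = 2, i.e. dim * (2 + dimensions) = 12 parameters in total.

from numpy import arange

dim, dimensions = 3, 2
params = arange(dim * (2 + dimensions))          # stands in for self.params (12 entries)

ingate = params[:dim]                            # -> [0 1 2]
forgetgate = params[dim:dim * (1 + dimensions)]  # dim entries per dimension -> [3 4 5 6 7 8]
outgate = params[dim * (1 + dimensions):]        # -> [ 9 10 11]
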
Example 2
def _setInitEvaluable(self, evaluable):
    if evaluable is None:
        # if there is no initial point specified, start at one sampled
        # from a standard normal distribution around the origin.
        if self.numParameters is not None:
            evaluable = randn(self.numParameters)
        else:
            raise ValueError('Could not determine the dimensionality of the evaluator. '
                             'Please provide an initial search point.')
    if isinstance(evaluable, list):
        evaluable = array(evaluable)

    # If the evaluable is provided as a list of numbers or as an array,
    # we wrap it into a ParameterContainer.
    if isinstance(evaluable, ndarray):
        pc = ParameterContainer(len(evaluable))
        pc._setParameters(evaluable)
        self._wasWrapped = True
        evaluable = pc
    self._initEvaluable = evaluable
    if isinstance(self._initEvaluable, ParameterContainer):
        if self.numParameters is None:
            self.numParameters = len(self._initEvaluable)
        elif self.numParameters != len(self._initEvaluable):
            raise ValueError("Parameter dimension mismatch: evaluator expects "
                             + str(self.numParameters) + " but the evaluable has "
                             + str(len(self._initEvaluable)) + ".")
Example 3
def _setParameters(self, p, owner=None):
    """ put slices of this array back into the modules """
    ParameterContainer._setParameters(self, p, owner)
    index = 0
    for x in self._containerIterator():
        x._setParameters(self.params[index:index + x.paramdim], self)
        index += x.paramdim
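
A minimal sketch of this distribution step, assuming a network assembled with pybrain.tools.shortcuts.buildNetwork: after setting the network's flat parameter vector, every contained connection sees its own consecutive slice of it, exactly as the loop over _containerIterator() above implies.

from numpy import arange
from pybrain.tools.shortcuts import buildNetwork

net = buildNetwork(2, 3, 1)             # small feed-forward net; its weights live in the connections
flat = arange(float(net.paramdim))      # recognizable values 0., 1., 2., ...
net._setParameters(flat)

print(net.params)                       # the complete flat vector
for conns in net.connections.values():
    for c in conns:
        print(c.params)                 # each connection holds one consecutive slice of it
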
Example 4
def _setParameters(self, p, owner=None):
    ParameterContainer._setParameters(self, p, owner)
    dim = self.outdim
    # three peephole-weight views of length outdim each: ingate, forgetgate, outgate
    self.ingatePeepWeights = self.params[:dim]
    self.forgetgatePeepWeights = self.params[dim:dim * 2]
    self.outgatePeepWeights = self.params[dim * 2:]
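
For reference, a hedged sketch of where this variant appears, assuming PyBrain's LSTMLayer constructed with peepholes enabled: such a layer owns exactly 3 * outdim parameters, split into the three views shown above.

from pybrain.structure.modules import LSTMLayer

layer = LSTMLayer(5, peepholes=True)    # peepholes add 3 * dim parameters
assert layer.paramdim == 3 * layer.outdim
print(layer.ingatePeepWeights)          # first 5 entries of layer.params
print(layer.forgetgatePeepWeights)      # next 5
print(layer.outgatePeepWeights)         # last 5
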
Example 5
# imports needed for this snippet
from random import random
from numpy import array
from pybrain.structure.parametercontainer import ParameterContainer
from pybrain.structure.evolvables.evolvable import Evolvable
from pybrain.tools.shortcuts import buildNetwork

# a trivial evaluator: the fitness of an Evolvable is simply its x attribute
evoEval = lambda e: e.x

# starting points
# ----------------------
xlist1 = [2.]
xlist2 = [0.2, 10]
xlist100 = list(range(12, 112))

xa1 = array(xlist1)
xa2 = array(xlist2)
xa100 = array(xlist100)

pc1 = ParameterContainer(1)
pc2 = ParameterContainer(2)
pc100 = ParameterContainer(100)
pc1._setParameters(xa1)
pc2._setParameters(xa2)
pc100._setParameters(xa100)

# for the task object, we need a module
# (task itself is defined elsewhere and not shown here)
nnet = buildNetwork(task.outdim, 2, task.indim)


# a minimalistic Evolvable subclass that, unlike the usual case, is not a ParameterContainer
class SimpleEvo(Evolvable):
    def __init__(self, x):
        self.x = x

    def mutate(self):
        self.x += random() - 0.3
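
A hedged usage sketch tying the pieces above together, assuming pybrain.optimization.HillClimber (which maximizes by default and needs only copy() and mutate() from its Evolvable): evoEval and SimpleEvo can be optimized directly, without any ParameterContainer involved.

from pybrain.optimization import HillClimber

opt = HillClimber(evoEval, SimpleEvo(1.2))
best, fitness = opt.learn(50)    # 50 mutate-and-compare steps
print(best.x, fitness)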