Example #1
 def _setInitEvaluable(self, evaluable):
     if evaluable is None:
         # if there is no initial point specified, we start at one that's sampled 
         # normally around the origin.
         if self.numParameters is not None:
             evaluable = randn(self.numParameters)
         else:
             raise ValueError('Could not determine the dimensionality of the evaluator. '+\
                              'Please provide an initial search point.')   
     if isinstance(evaluable, list):
         evaluable = array(evaluable)
     
     # If the evaluable is provided as a list of numbers or as an array,
     # we wrap it into a ParameterContainer.
     if isinstance(evaluable, ndarray):            
         pc = ParameterContainer(len(evaluable))
         pc._setParameters(evaluable)
         self._wasWrapped = True
         evaluable = pc
     self._initEvaluable = evaluable
     if isinstance(self._initEvaluable, ParameterContainer):
         if self.numParameters is None:            
             self.numParameters = len(self._initEvaluable)
         elif self.numParameters != len(self._initEvaluable):
             raise ValueError("Parameter dimension mismatch: evaluator expects "+str(self.numParameters)\
                              +" but the evaluable has "+str(len(self._initEvaluable))+".")
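For readers without a PyBrain install, here is a minimal stand-in sketch of the same normalisation logic; the _Wrapper class and normalize_evaluable helper are hypothetical names introduced for illustration only, not part of the PyBrain API.

import numpy as np

class _Wrapper:
    """Hypothetical stand-in for ParameterContainer: holds a flat parameter vector."""
    def __init__(self, n):
        self.paramdim = n
        self.params = np.zeros(n)
    def _setParameters(self, p):
        self.params[:] = np.asarray(p, dtype=float)
    def __len__(self):
        return self.paramdim

def normalize_evaluable(evaluable, num_parameters=None):
    # mirrors _setInitEvaluable: sample a random start if nothing is given
    if evaluable is None:
        if num_parameters is None:
            raise ValueError('Provide an initial search point or a dimensionality.')
        evaluable = np.random.randn(num_parameters)
    # lists and arrays are wrapped so downstream code can rely on .params
    if isinstance(evaluable, list):
        evaluable = np.asarray(evaluable)
    if isinstance(evaluable, np.ndarray):
        pc = _Wrapper(len(evaluable))
        pc._setParameters(evaluable)
        evaluable = pc
    return evaluable

print(normalize_evaluable([2.1, 4]).params)              # [2.1 4. ]
print(len(normalize_evaluable(None, num_parameters=3)))  # 3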
Example #2
 def _setParameters(self, p, owner=None):
     """ put slices of this array back into the modules """
     ParameterContainer._setParameters(self, p, owner)
     index = 0
     for x in self._containerIterator():
         x._setParameters(self.params[index:index + x.paramdim], self)
         index += x.paramdim
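The slice-and-forward pattern above is easy to check with plain NumPy; the Child class below is a hypothetical stand-in for a nested module that owns paramdim entries of the parent's flat vector.

import numpy as np

class Child:
    def __init__(self, paramdim):
        self.paramdim = paramdim
        self.params = np.zeros(paramdim)
    def _setParameters(self, p):
        self.params[:] = p

children = [Child(3), Child(2), Child(4)]
flat = np.arange(9, dtype=float)          # parent-level parameter vector

index = 0
for child in children:                    # same bookkeeping as the _containerIterator loop
    child._setParameters(flat[index:index + child.paramdim])
    index += child.paramdim

print([c.params.tolist() for c in children])
# [[0.0, 1.0, 2.0], [3.0, 4.0], [5.0, 6.0, 7.0, 8.0]]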
Example #4
 def _setParameters(self, p, owner=None):
     ParameterContainer._setParameters(self, p, owner)
     size = self.dim
     self.ingatePeepWeights = self.params[:size]
     self.forgetgatePeepWeights = self.params[size:size *
                                              (1 + self.dimensions)]
     self.outgatePeepWeights = self.params[size * (1 + self.dimensions):]
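The forget gate holds one peephole vector per dimension, which is why its slice spans size * self.dimensions entries; a quick length check with hypothetical concrete numbers:

import numpy as np

size, dimensions = 5, 2                              # hypothetical layer sizes
params = np.arange(size * (2 + dimensions), dtype=float)

ingate  = params[:size]                              # size entries
forget  = params[size:size * (1 + dimensions)]       # size * dimensions entries
outgate = params[size * (1 + dimensions):]           # size entries

assert len(ingate) == size and len(outgate) == size
assert len(forget) == size * dimensions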
Example #5
 def _setParameters(self, p, owner=None):
     ParameterContainer._setParameters(self, p, owner)
     nrNeurons = self.outdim
     indim = self.indim  # input size of the layer; each gate block below holds indim * nrNeurons weights
     first, second = 0, 0
     first, second = second, second + indim * nrNeurons
     self.ingateConns = self.params[first:second]
     first, second = second, second + indim * nrNeurons
     self.forgetgateConns = self.params[first:second]
     first, second = second, second + indim * nrNeurons
     self.cellConns = self.params[first:second]
     first, second = second, second + indim * nrNeurons
     self.outgateConns = self.params[first:second]
     first, second = second, second + nrNeurons * nrNeurons
     self.ingateRecConns = self.params[first:second]
     first, second = second, second + nrNeurons * nrNeurons
     self.forgetgateRecConns = self.params[first:second]
     first, second = second, second + nrNeurons * nrNeurons
     self.cellRecConns = self.params[first:second]
     first, second = second, second + nrNeurons * nrNeurons
     self.outgateRecConns = self.params[first:second]
     if self.peep:
         first, second = second, second + nrNeurons
         self.ingatePeepWeights = self.params[first:second]
         first, second = second, second + nrNeurons
         self.forgetgatePeepWeights = self.params[first:second]
         first, second = second, second + nrNeurons
         self.outgatePeepWeights = self.params[first:second]
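For orientation, the total length of the flat vector that this slicing consumes works out to 4 * indim * nrNeurons + 4 * nrNeurons**2, plus 3 * nrNeurons when peepholes are enabled; a sketch with hypothetical sizes:

indim, nrNeurons, peep = 3, 4, True          # hypothetical sizes

total = 4 * indim * nrNeurons                # ingate/forgetgate/cell/outgate input weights
total += 4 * nrNeurons * nrNeurons           # the four recurrent weight blocks
if peep:
    total += 3 * nrNeurons                   # ingate/forgetgate/outgate peephole weights

print(total)                                 # 124 for these sizes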
Example #6
 def _setParameters(self, p, owner = None):
     ParameterContainer._setParameters(self, p, owner)
     nrNeurons = self.outdim
     indim = self.indim  # input size of the layer; the gate blocks below are indim*nrNeurons long
     first, second = 0, 0
     first, second = second, second + indim*nrNeurons
     self.ingateConns = self.params[first:second]
     first, second = second, second + indim*nrNeurons
     self.forgetgateConns = self.params[first:second]
     first, second = second, second + indim*nrNeurons
     self.cellConns = self.params[first:second]
     first, second = second, second + indim*nrNeurons
     self.outgateConns = self.params[first:second]
     first, second = second, second + nrNeurons*nrNeurons
     self.ingateRecConns = self.params[first:second]
     first, second = second, second + nrNeurons*nrNeurons
     self.forgetgateRecConns = self.params[first:second]
     first, second = second, second + nrNeurons*nrNeurons
     self.cellRecConns = self.params[first:second]
     first, second = second, second + nrNeurons*nrNeurons
     self.outgateRecConns = self.params[first:second]
     if self.peep:
         first, second = second, second + nrNeurons
         self.ingatePeepWeights = self.params[first:second]
         first, second = second, second + nrNeurons
         self.forgetgatePeepWeights = self.params[first:second]
         first, second = second, second + nrNeurons
         self.outgatePeepWeights = self.params[first:second]
Example #7
# starting points
# ----------------------
xlist1 = [2.]
xlist2 = [0.2, 10]
xlist100 = list(range(12, 112))

xa1 = array(xlist1)
xa2 = array(xlist2)
xa100 = array(xlist100)

pc1 = ParameterContainer(1)
pc2 = ParameterContainer(2)
pc100 = ParameterContainer(100)
pc1._setParameters(xa1)
pc2._setParameters(xa2)
pc100._setParameters(xa100)

# for the task object, we need a module
nnet = buildNetwork(task.outdim, 2, task.indim)

# a minimalistic Evolvable subclass that (unlike the usual case) is not a ParameterContainer
class SimpleEvo(Evolvable):
    def __init__(self, x): self.x = x
    def mutate(self):      self.x += random() - 0.3
    def copy(self):        return SimpleEvo(self.x)
    def randomize(self):   self.x = 10 * random() - 2
    def __repr__(self):    return '--%.3f--' % self.x

evo1 = SimpleEvo(-3.)
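A quick illustration of the Evolvable interface on the class above; it needs only the SimpleEvo definition and random from the standard library.

from random import random   # needed if the surrounding snippet has not already imported it

parent = SimpleEvo(-3.)
child = parent.copy()       # independent copy of the individual
child.mutate()              # small random perturbation of x
print(parent, child)        # e.g. ---3.000-- ---2.877--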
Example #8
 def _setParameters(self, p, owner = None):
     ParameterContainer._setParameters(self, p, owner)
     dim = self.outdim
     self.ingatePeepWeights = self.params[:dim]
     self.forgetgatePeepWeights = self.params[dim:dim*2]
     self.outgatePeepWeights = self.params[dim*2:]
Example #9
 def _setParameters(self, p, owner=None):
     ParameterContainer._setParameters(self, p, owner)
     dim = self.outdim
     self.ingatePeepWeights = self.params[:dim]
     self.forgetgatePeepWeights = self.params[dim:dim * 2]
     self.outgatePeepWeights = self.params[dim * 2:]
Example #10
 def _setParameters(self, p, owner=None):
     ParameterContainer._setParameters(self, p, owner)
     size = self.dim
     self.ingatePeepWeights = self.params[:size]
     self.forgetgatePeepWeights = self.params[size:size*(1 + self.dimensions)]
     self.outgatePeepWeights = self.params[size*(1 + self.dimensions):]
Example #11
# here's the default way of setting it up: provide a function and an initial point
f = TabletFunction(2)
x0 = [2.1, 4]
l = algo(f, x0)

# f can also be a simple lambda function
l = algo(lambda x: sum(x)**2, x0)

# in the case of continuous optimization, the initial point
# can be provided as a list (above), an array...
l = algo(f, array(x0))

# ... or a ParameterContainer
pcontainer = ParameterContainer(2)
pcontainer._setParameters(x0)
l = algo(f, pcontainer)

# the initial point can be omitted if:
# a) the problem dimension is specified manually
l = algo(f, numParameters = 2)

# b) the function is a FunctionEnvironment that specifies the problem dimension itself
l = algo(f)

# but if neither is the case, this leads to an error:
try:
    l = algo(lambda x: sum(x)**2)
except ValueError as e:
    print('Error caught:', e)