Example #1
    def _preProcessingData(self, x, y):
        # Convert every feature value and target value to an Rdm interval
        # number so later arithmetic is performed in interval form.
        for i in range(len(y.data)):
            for k in range(len(x.data[0])):
                if not isinstance(x.data[i][k], Rdm):
                    x.data[i][k] = Rdmia.number(x.data[i][k])
            if not isinstance(y.data[i][0], Rdm):
                y.data[i][0] = Rdmia.number(y.data[i][0])
Example #2
import numpy as np

def rosenbrock(x):  # rosen.m
    """http://en.wikipedia.org/wiki/Rosenbrock_function"""
    # A sum of squares, so Levenberg-Marquardt (scipy.optimize.leastsq) works
    # well; the 100.0 coefficient is wrapped as an rdmia interval number.
    x = np.asarray_chkfinite(x)  # reject NaN/inf inputs
    x0 = x[:-1]
    x1 = x[1:]
    return (sum((1.0 - x0) ** 2.0)
            + rdmia.number(100.0) * sum((x1 - x0 ** 2.0) ** 2.0))
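As a quick sanity check of the formula, here is a plain-Python sketch with an ordinary float coefficient instead of the interval-wrapped one (the rosenbrock_plain name is illustrative, not from the original project); it evaluates to zero at the global minimum (1, 1, ..., 1):

def rosenbrock_plain(x):
    # Same sum-of-squares form, with a plain float coefficient.
    return (sum((1.0 - a) ** 2 for a in x[:-1])
            + 100.0 * sum((b - a ** 2) ** 2 for a, b in zip(x[:-1], x[1:])))

print(rosenbrock_plain([1.0, 1.0, 1.0]))  # 0.0 at the global minimum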
Example #3
def makeList(steps, dim):
    # Build one row per step value; each row holds `dim` copies of that
    # step wrapped as an interval number.
    result = []
    for k in steps:
        result.append([rdmia.number(k) for _ in range(dim)])
    return result
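A brief usage sketch, assuming rdmia is importable as in the snippet above; the step values are made up for illustration:

grid = makeList([0.1, 0.5], dim=3)
# grid[0] holds three copies of rdmia.number(0.1) and grid[1] three copies
# of rdmia.number(0.5), i.e. a len(steps) x dim grid of interval numbers.
print(len(grid), len(grid[0]))  # 2 3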
Example #4
    def predict(self, x):
        # Return one prediction per input row. With separate lower/upper
        # predictor sets, the two bounds are recombined via Rdmia.number.
        r_list = []
        if self._isMult:
            for val in range(len(x)):
                resultLow = sum(
                    self._predictorsLow.data[i] * x[val][i].lower()
                    for i in range(len(self._predictorsLow.data)))
                resultUp = sum(
                    self._predictorsUp.data[i] * x[val][i].upper()
                    for i in range(len(self._predictorsUp.data)))
                r_list.append(Rdmia.number(resultLow, resultUp))
        else:
            # Single predictor set: a plain dot product per row.
            for val in range(len(x)):
                result = sum(
                    self._predictors.data[i] * x[val][i]
                    for i in range(len(self._predictors.data)))
                r_list.append(result)

        return r_list
Example #5
    def _mean(self, d):
        # Average of the first column of d, accumulated from an interval
        # zero so the result stays an interval number.
        mean = Rdmia.number(0.0)
        for val in d:
            mean += val[0]
        return mean / len(d)