Example #1
def train_anf(anfis, membership, mf, x_train, y_train, x_val, y_val, epc=10):
    """Train an ANFIS model on the training set and return it with its validation MSE."""
    best_result = np.inf  # note: not used below
    mfc = membership.membershipfunction.MemFuncs(mf)
    anf = anfis.ANFIS(x_train, y_train, mfc)
    anf.trainHybridJangOffLine(epochs=epc)
    prev = anfis.predict(anf, x_val)  # predictions on the validation inputs
    mse = MSE(y_val, prev)

    return anf, mse
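
A minimal usage sketch for the helper above (not part of the original example): the toy data and membership functions are illustrative, the anfis and membership modules are the same ones used throughout these examples, and MSE is assumed to be something like sklearn's mean_squared_error imported in the module that defines train_anf.

import numpy as np
import anfis
import membership

x = np.linspace(-3, 3, 40).reshape(-1, 1)    # toy single-input data
y = np.sin(x).ravel()
x_train, y_train, x_val, y_val = x[:30], y[:30], x[30:], y[30:]

mf = [[['gaussmf', {'mean': -1., 'sigma': 1.}],   # two Gaussian MFs for the one input
       ['gaussmf', {'mean': 1., 'sigma': 1.}]]]

model, val_mse = train_anf(anfis, membership, mf, x_train, y_train, x_val, y_val, epc=5)
print(val_mse)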
Example #2
    def training(self):
        ts = numpy.loadtxt("anfis/traindaya.txt", usecols=[0, 1, 2])
        X = ts[:, 0:2]
        Y = ts[:, 2]

        mf = [[['gaussmf', {'mean': -11., 'sigma': 5.}],
               ['gaussmf', {'mean': -8., 'sigma': 5.}],
               ['gaussmf', {'mean': -14., 'sigma': 20.}],
               ['gaussmf', {'mean': -7., 'sigma': 7.}]],
              [['gaussmf', {'mean': -10., 'sigma': 20.}],
               ['gaussmf', {'mean': -20., 'sigma': 11.}],
               ['gaussmf', {'mean': -9., 'sigma': 30.}],
               ['gaussmf', {'mean': -10.5, 'sigma': 5.}]]]

        mfc = membership.membershipfunction.MemFuncs(mf)
        self.anf = anfis.ANFIS(X, Y, mfc)
        self.anf.trainHybridJangOffLine(epochs=self.epochs)
        # print(round(anf.consequents[-1][0], 6))
        # print(round(anf.consequents[-2][0], 6))
        # print(round(anf.fittedValues[9][0], 6))
        if round(self.anf.consequents[-1][0], 6) == -5.275538 and round(
                self.anf.consequents[-2][0], 6) == -1.990703 and round(
                    self.anf.fittedValues[9][0], 6) == 0.002249:
            print('test is good')
Example #3
mf = [
    [
        ['gaussmf',{'mean':0.,'sigma':1.}],
        ['gaussmf',{'mean':-1.,'sigma':2.}],
        ['gaussmf',{'mean':-4.,'sigma':10.}],
        ['gaussmf',{'mean':-7.,'sigma':7.}]
    ],
    [
        ['gaussmf',{'mean':1.,'sigma':2.}],
        ['gaussmf',{'mean':2.,'sigma':3.}],
        ['gaussmf',{'mean':-2.,'sigma':10.}],
        ['gaussmf',{'mean':-10.5,'sigma':5.}]
    ]
]

mfc = membership.membershipfunction.MemFuncs(mf)
anf = anfis.ANFIS(X, Y, mfc)
anf.trainHybridJangOffLine(epochs=20)
print(round(anf.consequents[-1][0], 6))
print(round(anf.consequents[-2][0], 6))
print(round(anf.fittedValues[9][0], 6))
if round(anf.consequents[-1][0], 6) == -5.275538 and round(
        anf.consequents[-2][0], 6) == -1.990703 and round(
            anf.fittedValues[9][0], 6) == 0.002249:
    print('test is good')
anf.plotErrors()
anf.plotResults()

# Matt's test
""" mf = \
    [
        [
Example #4
def test():

    ts = pd.read_csv('event_driven_model.csv',
                     usecols=[1, 2, 3, 4, 5, 6, 7, 8],
                     encoding='gbk')

    # Handle abnormal values (fill missing entries with 0)
    ts.fillna(0, inplace=True)

    # Set the reference start date.
    dcounter = partial(tradedays_count, datetime(2010, 12, 30))
    # Regularize the time input
    days = [datetime.strptime(str(time), '%Y%m%d') for time in ts['time']]
    days = dcounter(np.asarray(days))
    ts['time'] = days

    # Preprocess the remaining input variables
    ts['second_exist'] = ts['second_exist'].astype(np.int32)
    ts.loc[ts['second_exist'] == 0, ts.columns[[5, 6, 7]]] = 0
    for col in range(ts.shape[1]):
        if col == 0:
            continue
        name = ts.columns[col]
        ts[name] = ts[name].astype(np.float32)
    ts['gap_time'] /= 360.0  # normalize
    for col in [0, 1, 2, 3]:
        name = ts.columns[col]
        ts[name] = (ts[name] - ts[name].min()) / (ts[name].max() - ts[name].min())
    print(ts)

    ts = np.asarray(ts)
    X = ts[:, 0:4]  # input variables
    Y = ts[:, 4:8]  # output variables

    # Define the membership functions
    # mf_time = [['sigmf', {'b': dcounter(datetime(2014, 1, 1)), 'c': -0.05}],
    #            ['gaussmf', {'mean': dcounter(datetime(2015, 1, 1)), 'sigma': 100}],
    #            ['sigmf', {'b': dcounter(datetime(2015, 6, 30)), 'c': 0.2}],
    #            ['gaussmf', {'mean': dcounter(datetime(2015, 11, 15)), 'sigma': 30}]]
    mf_time = [['gaussmf', {'mean': 0.87, 'sigma': 0.1}],
               ['gaussmf', {'mean': 0.92, 'sigma': 0.3}]]

    mf_class = [['gaussmf', {'mean': 0.1, 'sigma': 0.05}],
                ['gaussmf', {'mean': 0.3, 'sigma': 0.1}],
                ['gaussmf', {'mean': 0.6, 'sigma': 0.1}],
                ['gaussmf', {'mean': 0.9, 'sigma': 0.1}]]

    # mf_low = [['gaussmf', {'mean': 1, 'sigma': 5}],
    #           ['gaussmf', {'mean': 10, 'sigma': 10}],
    #           ['sigmf', {'b': 60, 'c': 0.01}]]
    mf_low = [['gaussmf', {'mean': 0.2, 'sigma': 0.1}],
              ['gaussmf', {'mean': 0.8, 'sigma': 0.1}]]

    mf_risepercent = [['gaussmf', {'mean': 0.1, 'sigma': 0.05}],
                      ['gaussmf', {'mean': 0.5, 'sigma': 0.3}]]
    # mf_risepercent = [['gaussmf', {'mean': 0.1, 'sigma': 0.1}],
    #                   ['gaussmf', {'mean': 0.5, 'sigma': 0.5}],
    #                   ['sigmf', {'b': 1, 'c': 0.02}]]

    mf = [mf_time, mf_class, mf_low, mf_risepercent]
    # mf = [mf_time, mf_class]
    and_func = ['mamdani', 'T-S']

    mfc = membership.membershipfunction.MemFuncs(mf)
    anf = anfis.ANFIS(X, Y, mfc, andfunc=and_func[0])
    anf.trainHybridJangOffLine(epochs=15, eta=0.00001)

    print(anf.fittedValues)
    print('mf list:')
    print(anf.memFuncs)
    anf.plotErrors()
Example #5
# In[112]:

from membership import membershipfunction
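
# `mf` is defined in an earlier cell that is not shown here. Judging by the
# other examples on this page, it would hold one list of ('gaussmf', parameters)
# pairs per input column of `df`, along the lines of (illustrative values only):
#
#     mf = [[['gaussmf', {'mean': 0., 'sigma': 1.}],
#            ['gaussmf', {'mean': 1., 'sigma': 2.}]],
#           [['gaussmf', {'mean': 0.5, 'sigma': 1.}],
#            ['gaussmf', {'mean': 2., 'sigma': 2.}]]]  # one sub-list per input column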

mfc = membershipfunction.MemFuncs(mf)

# In[113]:

power_to_be_generated = df.pop('power_to_be_generated')

# In[114]:

import anfis

anf = anfis.ANFIS(df, power_to_be_generated, mfc)
train = anf.trainHybridJangOffLine(epochs=20)

# In[115]:

anf.plotErrors()

# In[116]:

anf.plotResults()

# In[104]:

print(train)

# In[ ]:
Example #6
           'gaussmf', {
               'mean': np.mean(np.arange(45, 76)),
               'sigma': np.std(np.arange(45, 76))
           }
       ],
       [
           'gaussmf', {
               'mean': np.mean(np.arange(55, 86)),
               'sigma': np.std(np.arange(55, 86))
           }
       ]]]

from membership import membershipfunction
mfc = membershipfunction.MemFuncs(mf)

import anfis
anf = anfis.ANFIS(train_data, train_label, mfc)

pred_train = anf.trainHybridJangOffLine(epochs=20)

train_label = np.reshape(train_label, [1, len(train_label)])
test_label = np.reshape(test_label, [1, len(test_label)])
print(train_label.shape)
print(test_label.shape)

error = np.mean((pred_train - train_label)**2)

print(error)
anf.plotErrors()
anf.plotResults()
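
The snippet above reshapes test_label but never scores the held-out set. A possible continuation, assuming a test_data array that matches test_label and the module-level anfis.predict(model, inputs) helper used in the other examples:

pred_test = anfis.predict(anf, test_data)   # forward pass on the test inputs
test_error = np.mean((np.ravel(pred_test) - np.ravel(test_label))**2)  # test-set MSE
print(test_error)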
Example #7
c = np.asarray(datase["Result"])
img = ax.scatter(x, y, z, c=c, cmap=plt.hot())
fig.colorbar(img)
plt.show()

"""preparing membership function """
mf = [[['gaussmf', {'mean': 0., 'sigma': 1.}],
       ['gaussmf', {'mean': -1., 'sigma': 2.}],
       ['gaussmf', {'mean': -4., 'sigma': 10.}],
       ['gaussmf', {'mean': -7., 'sigma': 7.}]],
      [['gaussmf', {'mean': 1., 'sigma': 2.}],
       ['gaussmf', {'mean': 2., 'sigma': 3.}],
       ['gaussmf', {'mean': -2., 'sigma': 10.}],
       ['gaussmf', {'mean': -10.5, 'sigma': 5.}]],
      [['gaussmf', {'mean': 0., 'sigma': 1.}],
       ['gaussmf', {'mean': -1., 'sigma': 2.}],
       ['gaussmf', {'mean': -4., 'sigma': 10.}],
       ['gaussmf', {'mean': -7., 'sigma': 7.}]],
      [['gaussmf', {'mean': 1., 'sigma': 2.}],
       ['gaussmf', {'mean': 2., 'sigma': 3.}],
       ['gaussmf', {'mean': -2., 'sigma': 10.}],
       ['gaussmf', {'mean': -10.5, 'sigma': 5.}]]]


mfc = membershipfunction.MemFuncs(mf)

"""training of the model"""
anf = anfis.ANFIS(dataset, target, mfc)

out = anf.trainHybridJangOffLine(epochs=10)

"""graphical plot of errors"""
anf.plotErrors()

"""actual vs trained graph"""
anf.plotResults()


tt = np.asarray(target)

"""cleaning of out values"""
ll = []
for i in out:
Example #8
}], ['gaussmf', {'mean': -7., 'sigma': 7.}]],
      [['gaussmf', {'mean': -10., 'sigma': 20.}],
       ['gaussmf', {'mean': -20., 'sigma': 11.}],
       ['gaussmf', {'mean': -9., 'sigma': 30.}],
       ['gaussmf', {'mean': -10.5, 'sigma': 5.}]]]

mfc = membership.membershipfunction.MemFuncs(mf)
anf = anfis.ANFIS(all_inputs, all_targets, mfc)
anf.trainHybridJangOffLine(epochs=10)
print(round(anf.consequents[-1][0], 6))
print(round(anf.consequents[-2][0], 6))
print(round(anf.fittedValues[9][0], 6))
if round(anf.consequents[-1][0], 6) == -5.275538 and round(
        anf.consequents[-2][0], 6) == -1.990703 and round(
            anf.fittedValues[9][0], 6) == 0.002249:
    print('test is good')
anf.plotErrors()
anf.plotResults()
Example #9
    """
    args_PSO = (args[0], args[1])
    learners = args[2]
    nPop = theta.shape[0]

    J = np.zeros(nPop)
    for i in range(nPop):
        J[i] = learners[i].create_model(theta[i, :], args_PSO)

    return J


# Init learners (one for each particle)
learners = []
for i in range(nPop):
    learners.append(anf.ANFIS(n_mf=n_mf, n_outputs=n_outputs, problem=problem))

# Always normalize inputs
Xn_tr, norm_param = utl.normalize_data(X_tr)
Xn_te = utl.normalize_data(X_te, norm_param)

# Build boundaries using heuristic rules
LB, UB = utl.bounds_pso(Xn_tr,
                        n_mf,
                        n_outputs,
                        mu_delta=mu_delta,
                        s_par=s_par,
                        c_par=c_par,
                        A_par=A_par)

# Scale output(s) in continuous problems to reduce the range in <A_par>