# Example 1
# 0
 def on_all_m_click(self, do=0):
     """Handle the calculator's memory buttons.

     do selects the action:
         0 = recall memory onto the expression,
         1 = add the current result to memory,
         2 = subtract the current result from memory,
         3 = clear memory.
     """
     if do == 3:
         self.memory = 0.0
     elif not self.exp.is_error():
         self.update_expression()
         if do == 0:
             self.history.out()
             r = str(self.memory)
             # str() renders whole floats as e.g. "5.0"; drop the trailing
             # ".0" (slicing the suffix off, not replace(), which would hit
             # the first occurrence anywhere in the string).
             if r.endswith('.0'):
                 r = r[:-2]
             self.exp.put_data_on_exp(r)
             self.update_monitor()
         elif do in (1, 2):
             res = get_result(self.exp.exp, self.ui.radio_bt_1.isChecked())
             if error(res):
                 self.history.out()
                 self.last_invalid_exp = self.exp.exp
                 self.exp.set_exp(res)
                 self.update_monitor()
             else:
                 try:
                     exp_float = float(res)
                     self.memory += exp_float if do == 1 else -exp_float
                 except (TypeError, ValueError):
                     # Result was not numeric; show the generic error.
                     # (Narrowed from a bare `except:` that also swallowed
                     # KeyboardInterrupt/SystemExit.)
                     self.history.out()
                     self.last_invalid_exp = self.exp.exp
                     self.exp.set_exp(ERRORS[2])
                     self.update_monitor()
# Example 2
# 0
 def on_equal_click(self):
     """Evaluate the current expression and display the result."""
     self.update_expression()
     if self.exp.is_error():
         return  # expression already marked invalid; nothing to evaluate
     self.history.out()
     res = get_result(self.exp.exp, self.ui.radio_bt_1.isChecked())
     if self.make_power_enabled:
         self.on_power_click()
     # Valid expressions go into the history; invalid ones are remembered
     # so they can be restored later.
     if not error(res):
         self.history.history.append(self.exp.exp)
     else:
         self.last_invalid_exp = self.exp.exp
     self.exp.set_exp(res)
     self.update_monitor()
# Example 3
# 0
def func(name):
    """Perturb the shared parameters, evaluate them, and log the outcome.

    Copies the module-level `parameter` dict, nudges "r" and "lambda" by a
    uniform random amount, keeps nudging until both fall back inside [0, 2],
    evaluates via tools.get_result, and appends a CSV line to a log file
    named after this script ("<script name>.txt").

    name: label written into the log line to identify this trial.
    """
    eps = 0.0001
    para = parameter.copy()
    para["r"] += random.uniform(-eps, eps)
    para["lambda"] += random.uniform(-eps, eps)
    # Random-walk each value until it re-enters the valid [0, 2] range.
    while not 0 <= para["r"] <= 2:
        para["r"] += random.uniform(-eps, eps)
    while not 0 <= para["lambda"] <= 2:
        para["lambda"] += random.uniform(-eps, eps)
    ans = tools.get_result(para, 6)
    print('process', os.getpid(), ans, str(para))
    # Log file lives next to this script: "<script name>.txt".
    filename = os.path.split(__file__)[-1].split(".")[0] + '.txt'
    with open(filename, "a") as f:
        # Trailing comma before the newline preserved from the original
        # log format.
        f.write("{},{},{},{},{},{},{},\n".format(
            os.getpid(), name, ans, para["r"], para["lambda"],
            para["epoch"], para["n"]))
def func(name):
    """Perturb the shared parameters, evaluate them, and log the outcome.

    Variant with a wider perturbation (eps=0.001) and evaluation mode 12;
    appends a CSV line to "./SA_F12_3.txt".

    name: accepted for interface parity with the sibling variant; this
    variant does not include it in the log line.
    """
    eps = 0.001
    para = parameter.copy()
    para["r"] += random.uniform(-eps, eps)
    para["lambda"] += random.uniform(-eps, eps)
    # Random-walk each value until it re-enters the valid [0, 2] range.
    while not 0 <= para["r"] <= 2:
        para["r"] += random.uniform(-eps, eps)
    while not 0 <= para["lambda"] <= 2:
        para["lambda"] += random.uniform(-eps, eps)
    ans = tools.get_result(para, 12)
    print('process', os.getpid(), ans, str(para))
    with open("./SA_F12_3.txt", "a") as f:
        # Trailing comma before the newline preserved from the original
        # log format.
        f.write("{},{},{},{},{},{},\n".format(
            os.getpid(), ans, para["r"], para["lambda"],
            para["epoch"], para["n"]))
# @author: yangydeng

import pandas as pd
from sklearn.ensemble import RandomForestRegressor
from sklearn.ensemble import ExtraTreesRegressor
from sklearn.ensemble import GradientBoostingRegressor
import sys
sys.path.append('../tools')
from tools import get_result,draw_feature_importance

# Data snapshot suffix shared by every input/output file below.
day_time = '_03_01_3'

train_x = pd.read_csv('../train_0/train_x%s.csv' % day_time)
train_y = pd.read_csv('../train_0/train_y%s.csv' % day_time)
test_x = pd.read_csv('../test_0/test_x%s.csv' % day_time)


# Alternative model kept for reference:
# RF = RandomForestRegressor(n_estimators=1200, random_state=1, n_jobs=-1,
#                            min_samples_split=2, min_samples_leaf=2, max_depth=25)
# RF.fit(train_x, train_y)
# pre = (RF.predict(test_x)).round()

ET = ExtraTreesRegressor(
    n_estimators=1200,
    random_state=1,
    n_jobs=-1,
    min_samples_split=2,
    min_samples_leaf=2,
    max_depth=25,
    max_features='sqrt',
    bootstrap=False,  # equivalent to the falsy 0 used previously
)
ET.fit(train_x, train_y)
pre = ET.predict(test_x).round()


result = get_result(pre)

result.to_csv('../results/result%s.csv' % day_time, index=False, header=False)
# draw_feature_importance(train_x, ET)
# -*- coding: utf-8 -*-
"""
Created on Thu Feb 09 10:21:49 2017

@author: Administrator
"""

import pandas as pd
from sklearn.ensemble import RandomForestRegressor
import sys
sys.path.append('../tools')
from tools import get_result

# Data snapshot suffix shared by every input/output file below.
day_time = '_02_11_2'

train_x = pd.read_csv('../train_2/train_x%s.csv' % day_time)
train_y = pd.read_csv('../train_2/train_y%s.csv' % day_time)
test_x = pd.read_csv('../test_2/test_x%s.csv' % day_time)


RF = RandomForestRegressor(
    n_estimators=500,
    random_state=1,
    n_jobs=-1,
    min_samples_split=2,
    min_samples_leaf=2,
    max_depth=25,
)
RF.fit(train_x, train_y)
pre = RF.predict(test_x).round()

result = get_result(pre)

result.to_csv('../results/result%s.csv' % day_time, index=False, header=False)
# Per-target hyper-parameters: index i holds the tuned values for output
# column i (see the grid-search records in the comments below).
param = {'subsample': [1, 1, 1, 1, 1, 1, 1],
         'min_samples_leaf': [1, 1, 1, 1, 1, 1, 1],
         'n_estimators': [200, 100, 200, 200, 200, 200, 100],
         'min_samples_split': [4, 8, 2, 8, 2, 4, 4],
         'learning_rate': [0.05, 0.1, 0.05, 0.05, 0.05, 0.05, 0.1],
         'max_features': [270, 'auto', 280, 'auto', 270, 280, 270],
         'random_state': [1, 1, 1, 1, 1, 1, 1],
         'max_depth': [4, 6, 4, 4, 4, 4, 4]}

result = DataFrame()

# Fit one gradient-boosting model per output column.
for i in range(0, 7):
    # FIX: the per-column learning_rate list was built but never used — the
    # loop hard-coded 0.05 for every column, contradicting the tuned 0.1
    # entries for columns 1 and 6.
    # NOTE(review): min_samples_leaf=1 and subsample=0.85 still deviate from
    # the param lists above — confirm whether that is deliberate retuning.
    GB = GradientBoostingRegressor(
        n_estimators=param['n_estimators'][i],
        learning_rate=param['learning_rate'][i],
        random_state=1,
        min_samples_split=param['min_samples_split'][i],
        min_samples_leaf=1,
        max_depth=param['max_depth'][i],
        max_features=param['max_features'][i],
        subsample=0.85)

    # FIX: DataFrame.icol() was deprecated and then removed from pandas
    # (0.21+); iloc is the supported positional-column accessor.
    GB.fit(train_x, train_y.iloc[:, i])
    pre = GB.predict(test_x).round()

    result['col' + str(i)] = pre

result = get_result(result.values)
result.to_csv('../results/result' + day_time + '.csv',
              index=False,
              header=False)

#draw_feature_importance(train_x,ET)

#0: {'subsample': 1, 'learning_rate': 0.05, 'min_samples_leaf': 1, \
#'n_estimators': 200, 'min_samples_split': 4, 'random_state': 1, 'max_features': 270, 'max_depth': 4}

#1: {'subsample': 1, 'learning_rate': 0.1, 'min_samples_leaf': 3,\
# 'n_estimators': 100, 'min_samples_split': 8, 'random_state': 1, 'max_features': auto, 'max_depth': 6}

#2: {'subsample': 1, 'learning_rate': 0.05, 'min_samples_leaf': 1,\
# 'n_estimators': 200, 'min_samples_split': 2, 'random_state': 1, 'max_features': 280, 'max_depth': 4}