Ejemplo n.º 1
0
    def poisciParametre(self, X, Y):
        """Grid-search ELM hyperparameters via cross-validation.

        For every combination of hidden-layer size, alpha and activation
        function, runs 5-fold CV (Helpers.cv) on a GenELMClassifier, prints
        progress, and appends each result row to "parametersELM.p".

        Parameters
        ----------
        X, Y : training features and labels, passed straight to Helpers.cv.

        Returns
        -------
        list
            One row per trial: the Helpers.cv result extended with
            [n_hidden, alpha, activation_function, "normal"].
        """
        activation_functions = ['multiquadric' , 'softlim', 'inv_multiquadric', 'gaussian', 'tanh', 'sine', 'tribas', 'inv_tribas', 'sigmoid']

        n_hiddens = [50, 100, 200, 500, 800, 900, 1000, 1500, 2000, 3000, 5000, 10000]
        parameters = []
        alphas = [1.0, 0.7, 0.5, 0.0]
        nrOfTrials = len(activation_functions) * len(alphas) * len(n_hiddens)
        trial = 1
        # NOTE(review): seeding from an unseeded randint does not make runs
        # reproducible — it only reshuffles the global RNG state. Kept for
        # behavioral compatibility; confirm whether a fixed seed was intended.
        np.random.seed(np.random.randint(10000000))
        for n_hidden in n_hiddens:
            for alpha in alphas:
                for actFunction in activation_functions:
                    cls = GenELMClassifier(hidden_layer = RandomLayer(n_hidden = n_hidden, activation_func = actFunction, alpha=alpha))

                    parameter = Helpers.cv(X, Y, cls, 5, printing = False)
                    parameter = parameter + [n_hidden, alpha, actFunction, "normal"]
                    parameters.append(parameter)
                    print(parameter, "%d/%d" % (trial, nrOfTrials))
                    # Append incrementally so partial results survive a crash.
                    Helpers.pickleListAppend2(parameter, "parametersELM.p")

                    trial = trial + 1
        # Fix: the accumulated results were collected but previously discarded
        # by a bare `return`; hand them back to the caller.
        return parameters
Ejemplo n.º 2
0
def main3():
    """Cross-validate several classifiers and print each score.

    Compares a single ELM, a bagged ELM, AdaBoost over trees, a random
    forest, and AdaBoost over that forest, using Helpers.cv.
    """
    X, Y = readData(trainFtrFile="data/trainFtrExtended_200f.csv",
                    trainClsFile= "data/trainClsExtended.csv", deleteFirstNFeatures=2)
    elm = GenELMClassifier(hidden_layer = RandomLayer(n_hidden = 50, activation_func = 'multiquadric', alpha=1.0))
    runs = [("elmc: ", elm)]
    runs.append(("baggedElmc: ", BaggingClassifier(elm, n_estimators=10)))
    runs.append(("adaTree: ", AdaBoostClassifier(n_estimators=30)))
    forest = sklearn.ensemble.RandomForestClassifier(n_estimators = 50, n_jobs = 1)
    runs.append(("rf: ", forest))
    # Reseed the global RNG between forest and boosted-forest construction,
    # matching the original statement order.
    np.random.seed(np.random.randint(10000000))
    runs.append(("adaRf: ", AdaBoostClassifier(forest, n_estimators=20)))

    for label, model in runs:
        print (label, Helpers.cv(X, Y, model))
Ejemplo n.º 3
0
def main5():
    """Cross-validate a bagged ELM classifier and print the result.

    Builds a GenELMClassifier wrapped in the project's Bagging ensemble
    (20 estimators, no cut-off ratio) and reports Helpers.cv on it.
    """
    X, Y = readData()
    # Fix: removed an unused `tree = DecisionTreeClassifier()` local that was
    # constructed but never referenced.
    elmc = GenELMClassifier(hidden_layer = RandomLayer(n_hidden = 100, activation_func = 'multiquadric', alpha=1.0))
    baggedelmc = Bagging(elmc, n_estimators=20, ratioCutOffEstimators=0)

    print ("baggedelmc: ", Helpers.cv(X, Y, baggedelmc))
Ejemplo n.º 4
0
def main1():
    """Cross-validate a bagged ELM and scatter-plot the CV diagnostics.

    Reads 10000 rows, runs 3-fold Helpers.cv on a bagged ELMClassifier,
    then shows two scatter plots: train-vs-test scores (processed in red,
    raw in blue) and precision vs. precision-threshold.
    """
    X, Y = readData(10000)
    elmc = ELMClassifier(n_hidden=100, activation_func='gaussian')
    baggedElmc = BaggingClassifier(elmc)
    # Fix: removed dead code — an unused RandomLayer/GenELMClassifier pair
    # (`rhl`, `genElmc`) and a commented-out fit call were never used.

    # `duration` is unpacked to match Helpers.cv's 7-tuple but is unused here.
    tr, ts, trRaw, tsRaw, prec, precTress, duration = Helpers.cv(X, Y, baggedElmc, 3, printing=True)
    plt.scatter(tr, ts, alpha=0.5, marker='D', c='r')
    plt.scatter(trRaw, tsRaw, alpha=0.5, marker='D', c='b')
    plt.show()
    plt.scatter(prec, precTress, alpha=0.9, marker='D', c='r')
    plt.show()