import numpy as np
import tqdm
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import KFold
from correlation_pearson.code import CorrelationPearson

# Ordinary least-squares baseline; the deprecated `normalize` argument has been
# dropped, since current scikit-learn releases no longer accept it.
regModel = LinearRegression(fit_intercept=True, copy_X=True, n_jobs=1)

finRes = []
for i in range(4):
    X = np.zeros((len(data[i][0]), 400))
    y = np.zeros(len(data[i][0]))
    for j in tqdm.trange(len(data[i][0])):
        temp = produceWordEmbd(data[i][0][j])
        X[j] = temp
        y[j] = data[i][1][j]
    Res = []
    kf = KFold(n_splits=5)
    c = CorrelationPearson()
    for train_index, test_index in kf.split(X):
        X_train, X_test = X[train_index], X[test_index]
        y_train, y_test = y[train_index], y[test_index]
        model = regModel
        model.fit(X_train, y_train)
        model_predicted = model.predict(X_test)
        Res.append(c.result(y_test, model_predicted))
        print(regMethod + "- Pearson Coefficient for " + emotions[i] + ": ",
              c.result(y_test, model_predicted))

    print(
        regMethod + ":Avg of pearson-coefficients for the " + emotions[i] +
        " : ",
        sum(Res) / 5)
    finRes.append(sum(Res) / 5)
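The snippet above also leans on produceWordEmbd, data, emotions and regMethod, which are defined elsewhere in the source repository. The sketch below is purely illustrative of what those pieces might look like; the averaged 400-dimensional word embedding, the embedding model wv, and the four emotion labels are assumptions, not the original code.

# Hypothetical stand-ins for names the snippet above assumes; the embedding
# model (`wv`), the 400-d dimensionality and the emotion list are guesses.
import numpy as np

def produceWordEmbd(sentence, wv=None, dim=400):
    """Average the word vectors of a sentence into one fixed-size vector."""
    vecs = [wv[w] for w in sentence.split() if wv is not None and w in wv]
    return np.mean(vecs, axis=0) if vecs else np.zeros(dim)

emotions = ["anger", "fear", "joy", "sadness"]   # one entry per data[i]
regMethod = "Linear Regression"                  # label used in the prints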
Example #2
    model.add(Dense(units=1, kernel_initializer='uniform', activation='sigmoid'))
    # Compile model
    model.compile(loss='mean_squared_error', optimizer='adam')
    return model

finRes = []
for i in range(4):
    X = np.zeros((len(data[i][0]), 400))
    y = np.zeros(len(data[i][0]))
    for j in tqdm.trange(len(data[i][0])):
        temp = produceWordEmbd(data[i][0][j])
        X[j] = temp
        y[j] = data[i][1][j]
    Res = []
    kf = KFold(n_splits=5)
    c = CorrelationPearson()
    for train_index, test_index in kf.split(X):
        X_train, X_test = X[train_index], X[test_index]
        y_train, y_test = y[train_index], y[test_index]
        model = KerasRegressor(build_fn=NN_model, epochs=100, batch_size=5, verbose=0)
        model.fit(X_train, y_train)
        model_predicted = model.predict(X_test)
        Res.append(c.result(y_test, model_predicted))
        print(regMethod +"- Pearson Coefficient for "+ emotions[i] + ": ", c.result(y_test, model_predicted))
		
    print(regMethod + ":Avg of pearson-coefficients for the " + emotions[i] + " : ", sum(Res)/5)
    finRes.append(sum(Res)/5)

print("--------------------------------------------")
print("Final PC for "+ regMethod ,sum(finRes)/4)
print("--------------------------------------------")
Example #3
            train_labels[k - sum_arr[i - 1]] = data[i][1][k - sum_arr[i - 1]]
    print("Creating dev arrays for " + emotions[j])
    for k in tqdm.trange(sum_arr[j - 1], sum_arr[j]):
        dev_arrays[k - sum_arr[j - 1]] = model[emotions[j] + str(k)]
        dev_labels[k - sum_arr[j - 1]] = data[j][1][k - sum_arr[j - 1]]
    print("Training a Neural Network with ")
    mlp = MLPRegressor(solver='sgd',
                       alpha=1e-5,
                       hidden_layer_sizes=(11, 6, 5),
                       random_state=1,
                       activation='relu',
                       learning_rate='adaptive')
    mlp.fit(train_arrays, train_labels)
    mlp_predicted = mlp.predict(dev_arrays)
    # print(dev_labels - mlp_predicted)
    c = CorrelationPearson()
    print("pearson-coefficient for " + emotions[i] + ": ",
          c.result(dev_labels, mlp_predicted))

regMethods = [
    "Neural Nets", "Decision Tree", "Random Forests", "K-NN", "ADA-Boost",
    "Gradient-Boost"
]
regModels = [
    MLPRegressor(solver='lbfgs',
                 alpha=1e-5,
                 hidden_layer_sizes=(9, 5, 7),
                 random_state=1,
                 activation='tanh',
                 learning_rate='adaptive'),
    DecisionTreeRegressor(random_state=0),
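The regModels list is cut off after its first two entries, while regMethods names six regressors. A plausible, hedged completion of the remaining entries is shown below; the constructors and parameters are guesses that simply mirror the names in regMethods, not the original settings.

# Hypothetical remainder of regModels; not the original author's settings.
from sklearn.ensemble import (RandomForestRegressor, AdaBoostRegressor,
                              GradientBoostingRegressor)
from sklearn.neighbors import KNeighborsRegressor

regModels_rest = [
    RandomForestRegressor(random_state=0),      # "Random Forests"
    KNeighborsRegressor(n_neighbors=5),         # "K-NN"
    AdaBoostRegressor(random_state=0),          # "ADA-Boost"
    GradientBoostingRegressor(random_state=0),  # "Gradient-Boost"
]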
Example #4
            z[i] = 0
    return z
    pass
features = [unigram_training_words, bigram_training_words, ngram_training_words, tfidf_training_words]
feature_names = ["unigram", "bigram", "ngram", "tfidf"]
for z in range(6):

    for g in range(4):
        print("Using feature: " + feature_names[g] + " *****************************************")
        finRes = []
        X = features[g]
        y = training_labels
        y = y.ravel()
        Res = []
        kf = KFold(n_splits=5)
        c = CorrelationPearson()
        for train_index, test_index in kf.split(X):
            # print("TRAIN:", train_index, "TEST:", test_index)
            X_train, X_test = X[train_index], X[test_index]
            y_train, y_test = y[train_index], y[test_index]
            Rmodel = regModels[z]
            Rmodel.fit(X_train, y_train)
            Rmodel_predicted = Rmodel.predict(X_test)
            """
            y_test_new = get_pos_half(y_test, y_test)
            Rmodel_predicted_new = get_pos_half(Rmodel_predicted, y_test)
            y_test_new = y_test_new[y_test_new != 0]
            Rmodel_predicted_new = Rmodel_predicted_new[Rmodel_predicted_new != 0]
            """
            Res.append(c.result(y_test, Rmodel_predicted))
            print("Feature used: " + feature_names[g] + ", regression model: " + regMethods[z] +
                  " ---- Pearson Coefficient for " + emotions[i] + ": ", c.result(y_test, Rmodel_predicted))
Example #5
import numpy as np
import pandas as pd
from numpy import nan
from sklearn.neighbors import NearestNeighbors
from scipy.sparse import csr_matrix
from sklearn.metrics.pairwise import pairwise_distances
from correlation_pearson.code import CorrelationPearson
pearson = CorrelationPearson()
import operator

from sqlalchemy import create_engine
import pymysql
import mysql.connector

# Database config
mydb = mysql.connector.connect(host="103.129.222.66",
                               port=3306,
                               user="******",
                               password="******",
                               database="mylearn1_mylearning")

mycursor = mydb.cursor()

db_connection_str = 'mysql+pymysql://mylearn1_mylearn1:W7e3l7:[email protected]:3306/mylearn1_mylearning'
db_connection = create_engine(db_connection_str)

sql = """SELECT us.user_id, us.content_id, c.title, c.content_img, c.description, r.rating, b.bookmarked, t.timespent, us.total_selection 
FROM user_selection us 
LEFT OUTER JOIN ratings r ON r.user_id = us.user_id AND r.content_id = us.content_id
LEFT OUTER JOIN bookmarks b ON b.user_id = us.user_id AND b.content_id = us.content_id
LEFT OUTER JOIN timespents t ON t.user_id = us.user_id AND t.content_id = us.content_id
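The query string above is truncated mid-statement in this excerpt. Assuming it is completed and closed, pulling the result into pandas through the SQLAlchemy engine defined earlier is the usual next step, as sketched below.

# Sketch: read the (completed) query into a DataFrame via the engine above.
import pandas as pd

df = pd.read_sql(sql, con=db_connection)   # one row per user/content selection
print(df.head())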
Example #6
#pip install correlation-pearson

from correlation_pearson.code import CorrelationPearson

X_Speed = [0.73, 0.81, 1.53, 1.97, 2.29, 2.86]

X_Energy = [1.507, 1.235, 0.654, 0.864, 0.656, 0.490]

correlation = CorrelationPearson()

print('Correlation coefficient of speed and Energy:' +
      str(correlation.result(X_Speed, X_Energy)))

Y_Power = [1.0, 1.0, 1.1, 1.4, 1.5, 1.7]

Y_Energy = [0.654, 1.235, 1.507, 0.490, 0.656, 0.864]

print('Correlation coefficient of Power and Energy:' +
      str(correlation.result(Y_Power, Y_Energy)))
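As a quick sanity check, the same coefficients can be reproduced with NumPy's corrcoef on the lists defined above.

# Cross-check the library results against NumPy's Pearson correlation.
import numpy as np

print(np.corrcoef(X_Speed, X_Energy)[0, 1])  # should match correlation.result(X_Speed, X_Energy)
print(np.corrcoef(Y_Power, Y_Energy)[0, 1])  # should match correlation.result(Y_Power, Y_Energy)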