"""This script trains logistic regression (LR) and a neural network (NN) at the same time."""

import numpy as np
import IOutils
# from sklearn.naive_bayes import GaussianNB
from sklearn.linear_model import LogisticRegression
from sklearn import svm
import random
import sys
from sklearn.metrics import confusion_matrix, precision_score, recall_score, f1_score, classification_report

from neuralnetworks.templates import BasicNN2

nn = BasicNN2(max_epochs=100, hidden=[200,50,30], input_shape=(None, 42), output_num_units=2)
ds = IOutils.data_streamer2(keeplist=[(i, 0) for i in range(1, 12)])  # series 0 for subjects 1-11

vt = IOutils.VectorTransformer()

# NaivB = GaussianNB()
lr = LogisticRegression(class_weight='balanced')  # 'balanced' replaces the deprecated 'auto'; accounts for class imbalance

# svc = svm.SVC(kernel='rbf', C=10, class_weight='balanced')
for X_next, Y_next in ds:
    # scale features to [0, 1]; zero out any NaNs left after the division
    X_next = X_next.astype(np.float64) / X_next.max()
    X_next[np.isnan(X_next)] = 0
    # shuffle samples and labels together
    zipped = list(zip(X_next, Y_next))
    random.shuffle(zipped)
    X, Y = zip(*zipped)
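    # --- hedged sketch (not in the original source): one plausible way the
    # shuffled chunk could be used. LogisticRegression.fit/predict and the
    # sklearn metrics imported above are real APIs; nn.fit(X, Y) is an
    # assumption about BasicNN2's interface.
    X, Y = np.asarray(X), np.asarray(Y)
    lr.fit(X, Y)
    # nn.fit(X, Y)  # assumed BasicNN2 exposes a scikit-learn-style fit()
    print(classification_report(Y, lr.predict(X)))  # in-sample sanity check only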
# In[1]:

import numpy as np
import matplotlib.pylab as plt
from collections import Counter
import IOutils
import random
from neuralnetworks.templates import BasicNN2
from sklearn.metrics import confusion_matrix, classification_report
from itertools import product


# In[2]:

training_ds = IOutils.data_streamer2(keeplist=[(i, 0) for i in range(1, 12)])  # series 0 for the first 11 subjects

nn = BasicNN2(input_shape=(None,42), output_num_units=11, max_epochs=100, hidden=[200,120,30])

vt = IOutils.VectorTransformer()


# In[3]:

n_repeat_sampling = 1
dataset_count = 0
for X,Y in training_ds:
    dataset_count += 1
    # transform the Ys
    Y = vt.transform(Y)
#     print('total size before sampling:', len(Y))
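    # hedged sketch (not in the original): the name 'n_repeat_sampling'
    # suggests the elided body balances classes by repeated undersampling
    # before training. This is an assumption, built only from numpy and the
    # Counter import above; nn.fit is an assumed BasicNN2 interface.
    for _ in range(n_repeat_sampling):
        counts = Counter(Y)
        n_min = min(counts.values())  # size of the rarest class
        keep = np.hstack([
            np.random.choice(np.where(Y == c)[0], n_min, replace=False)
            for c in counts
        ])
        # nn.fit(np.asarray(X)[keep], np.asarray(Y)[keep])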
import numpy as np
import matplotlib.pylab as plt
from collections import Counter
import sys

import IOutils
from sklearn.linear_model import LogisticRegression


if len(sys.argv) < 2:
    print("""arguments:
        SUBJECT_ID: id of the subject you want to train
    """)
    raise Exception('NEED MORE ARGUMENTS')
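# example invocation (the script name is hypothetical, not from the source):
#   python train_single_subject.py 2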
    
    
# NUM_ZERO_METRIC = sys.argv[3]
# obtain the first six series (1-6) for the chosen subject
subject_id = int(sys.argv[1])
training_ds = IOutils.data_streamer2(keeplist=[(subject_id, i) for i in range(1, 7)])
#nn = BasicNN(input_shape=(None,42), output_num_units=12, max_epochs=int(sys.argv[5]), hidden_num_units=int(sys.argv[4]))
vt = IOutils.VectorTransformer()

linear = LogisticRegression(class_weight='balanced')  # 'balanced' replaces the deprecated 'auto'
           
# n_repeat_sampling = int(sys.argv[2])
dataset_count = 0
for X,Y in training_ds:
    dataset_count += 1
    # transform the Ys
    Y = vt.transform(Y)
#     print('total size before sampling:', len(Y))
    X = X.astype(np.float64)  # np.float is deprecated; use the explicit dtype
    # normalization for regression: zero out NaNs before scaling
    X[np.isnan(X)] = 0
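    # hedged sketch (not in the original): the comment above promises
    # normalization, so one plausible completion scales the chunk to [0, 1]
    # and fits the LogisticRegression declared earlier.
    x_max = X.max()
    if x_max > 0:
        X = X / x_max
    linear.fit(X, Y)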
import numpy as np
import IOutils
# from sklearn.naive_bayes import GaussianNB
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier
# from sklearn import svm
import random
import sys
from sklearn.metrics import confusion_matrix, precision_score, recall_score, f1_score




ds = IOutils.data_streamer2() 

vt = IOutils.VectorTransformer()

X_valid, Y_valid = next(ds)  # first chunk held out for validation

# use the transformer as follows:
Y_valid = vt.transform(Y_valid)


Y_valid[Y_valid != 0] = 1  # binarize: any non-zero event becomes the positive class

# NaivB = GaussianNB()
# linear = LogisticRegression(class_weight = 'auto')  #accounts for class imbalance
# support = svm.SVC(kernel='rbf',C=10)
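# hedged sketch (not in the original): the imports above suggest the script
# goes on to fit a classifier on later chunks and score it on the held-out
# validation split. RandomForestClassifier and the metric calls are real
# sklearn APIs; this particular train/validate flow is an assumption.
rf = RandomForestClassifier(n_estimators=100, class_weight='balanced')
X_train, Y_train = next(ds)
Y_train = vt.transform(Y_train)
Y_train[Y_train != 0] = 1  # same binarization as the validation labels
rf.fit(np.nan_to_num(X_train.astype(np.float64)), Y_train)
pred = rf.predict(np.nan_to_num(X_valid.astype(np.float64)))
print(confusion_matrix(Y_valid, pred))
print(precision_score(Y_valid, pred), recall_score(Y_valid, pred), f1_score(Y_valid, pred))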
# Example 5
import numpy as np
import IOutils as io  # data_streamer2 below is referenced via the 'io' alias
import os
import pandas as pd





# Grab train data
data = []
label = []
n_sub = 1
n_series = 8

train_streamer = io.data_streamer2(mode='train')

for k in range(n_sub):
    sub_data = []
    sub_label = []
    for series in range(n_series):
        d, e = next(train_streamer)  # one (data, events) pair per series
        sub_data.append(d)
        sub_label.append(e)

    data.append(sub_data)
    label.append(sub_label)

np.save('eeg_train.npy', np.array([data, label], dtype=object))  # ragged nested lists need an object array

del data, label
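# hedged usage note (not in the original): reloading the archive saved above.
# allow_pickle=True is required in recent numpy because the payload is an
# object array of nested lists, not a plain ndarray.
data, label = np.load('eeg_train.npy', allow_pickle=True)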
# coding: utf-8

# In[1]:

import numpy as np
import matplotlib.pylab as plt
from collections import Counter
import IOutils
import random
from neuralnetworks.templates import BasicNN2
from sklearn.metrics import confusion_matrix, classification_report


# In[ ]:

training_ds = IOutils.data_streamer2(keeplist=[(1, i) for i in range(1, 7)])  # series 1-6 for the first subject
nn = BasicNN2(input_shape=(None,42), output_num_units=11, max_epochs=100, hidden=[200,120,30])
vt = IOutils.VectorTransformer()


# In[ ]:

n_repeat_sampling = 1
dataset_count = 0
for X,Y in training_ds:
    dataset_count += 1
    # transform the Ys
    Y = vt.transform(Y)
#     print('total size before sampling:', len(Y))
    X = X.astype(np.float64)  # np.float is deprecated; use the explicit dtype
    # normalization for regression
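    # hedged completion (not in the original, which ends here): mirroring the
    # earlier scripts, the normalization that follows this comment is assumed
    # to be NaN-safe scaling to [0, 1] before feeding the network.
    X[np.isnan(X)] = 0
    x_max = X.max()
    if x_max > 0:
        X = X / x_max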