Example #1
import timeit

from sklearn.metrics import confusion_matrix  # assumed source of confusion_matrix


def part_b(file_n):
    # Globals x (training data), tests (test data), NeuralNet and plot_confusion
    # are assumed to be defined elsewhere in the module.
    with open(file_n, "r") as f:
        ip = int(f.readline())
        op = int(f.readline())
        batch = int(f.readline())
        n = int(f.readline())  # read from the file but not used below
        h = f.readline().rstrip().split(" ")
        h = list(map(int, h))
        h = [ip] + h + [op]
        non_lin = 1 if f.readline() == "relu\n" else 0
        eta = 0 if f.readline() == "fixed\n" else 1
    print(ip, op, batch, n)
    print(h)
    print(non_lin, eta)
    start = timeit.default_timer()
    net = NeuralNet(h, bool(non_lin))
    net.grad_des(x[:, 0:-1], x[:, -1], batch, bool(eta))
    stop = timeit.default_timer()
    t_acc = 100 * net.score(x[:, 0:-1], x[:, -1])
    ts_acc = 100 * net.score(tests[:, 0:-1], tests[:, -1])
    print("Train accuracy ", t_acc)
    print("Test accuracy ", ts_acc)
    print("Training time ", (stop - start))
    conf = confusion_matrix(tests[:, -1].tolist(), net.pred(tests[:, 0:-1]))
    plot_confusion(conf, list(set(tests[:, -1].flatten().tolist())),
                   "For layers " + str(h))
Example #2
import timeit

import numpy as np
from sklearn.metrics import confusion_matrix  # assumed source of confusion_matrix


def part_d(eta_a=False, rlu=False):
    # Globals x, tests, f_ptr, NeuralNet, plot_confusion and plot_metric
    # are assumed to be defined elsewhere in the module.
    tt = np.zeros((5, 2))
    m = 0
    h = [85, 0, 0, 10]  # the two hidden layer sizes are filled in below
    l = [5, 10, 15, 20, 25]
    for i in l:
        print("For 2 layer ", i, eta_a, rlu)
        h[1] = i
        h[2] = i
        start = timeit.default_timer()
        net = NeuralNet(h, rlu)
        net.grad_des(x[:, 0:-1], x[:, -1], 100, eta_a)
        stop = timeit.default_timer()
        t_acc = 100 * net.score(x[:, 0:-1], x[:, -1])
        ts_acc = 100 * net.score(tests[:, 0:-1], tests[:, -1])
        f_ptr.write("\nFor double layer " + str(eta_a) + str(rlu))
        f_ptr.write(str(i))
        f_ptr.write("\nTraining acc ")
        f_ptr.write(str(t_acc))
        f_ptr.write("\nTesting acc ")
        f_ptr.write(str(ts_acc))
        f_ptr.write("\nTraining time ")
        f_ptr.write(str(stop - start))
        print("Train accuracy ", t_acc)
        print("Test accuracy ", ts_acc)
        print("Training time ", (stop - start))
        tt[m, 0] = t_acc
        tt[m, 1] = ts_acc
        m = m + 1
        conf = confusion_matrix(tests[:, -1].tolist(),
                                net.pred(tests[:, 0:-1]))
        plot_confusion(conf, list(set(tests[:, -1].flatten().tolist())),
                       "For 2 layers " + str(h) + str(eta_a) + str(rlu))
    print(tt)
    plot_metric(tt, l, "For two hidden layers " + str(eta_a) + str(rlu))
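A hedged sketch of a driver for part_d covering both flags (adaptive learning rate eta_a, ReLU activation rlu); the loop is illustrative, not from the source, and assumes x, tests and f_ptr are already initialised:

# Sweep all four flag combinations (hypothetical driver).
for eta_a in (False, True):
    for rlu in (False, True):
        part_d(eta_a=eta_a, rlu=rlu)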
Example #3
import timeit

import numpy as np
from sklearn.metrics import confusion_matrix  # assumed source of confusion_matrix
from svmutil import svm_train, svm_predict  # libsvm Python interface (may be libsvm.svmutil)


def libsvm(prob, param, par_s, s, flag=False, flag_x=False, flag_v=False, conf=False):
    # Globals m, x, y, b, v_set_x, v_set_y, test_x, test_y and confmat
    # are assumed to be defined elsewhere in the module.
    global acc, acc_v
    param.parse_options(par_s)
    print("c ", param.C)
    start = timeit.default_timer()
    model = svm_train(prob, param)
    stop = timeit.default_timer()
    print("Training time with libsvm " + s + " kernel", stop - start, "seconds")
    l = model.l
    print("Libsvm weight vector w is ")
    alph = np.zeros((m, 1))
    if not flag:
        # Recover the primal weight vector from the support-vector coefficients.
        for i in range(l):
            ind = model.sv_indices[i]
            alph[ind, 0] = model.sv_coef[0][i]
        w = np.matmul(np.transpose(alph * y), x)
        print("Weight vector w is ", w)
    print("Intercept term b is ", b)
    if flag_v:
        _, acc_v, _ = svm_predict(v_set_y, v_set_x.tolist(), model)
        print("Accuracy over validation set using libsvm with " + s + " kernel: ", acc_v)
    if flag_x:
        _, acc_x, _ = svm_predict(y, x.tolist(), model)
        print("Accuracy over training set using libsvm with " + s + " kernel: ", acc_x)
    pred_val, acc, _ = svm_predict(test_y, test_x.tolist(), model)
    print("Accuracy over test set using libsvm with " + s + " kernel: ", acc)
    if conf:
        conf = confusion_matrix(test_y.tolist(), pred_val)
        confmat.plot_confusion(conf, list(set(test_y.flatten().tolist())),
                               "SVM (Multi-Classification)")
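A minimal usage sketch, assuming the classic libsvm Python bindings (svm_problem and svm_parameter from svmutil, or libsvm.svmutil in the pip package) and that the module globals used above (x, y, test_x, test_y, m, b, ...) are already loaded; the option string and kernel name below are illustrative:

from svmutil import svm_problem, svm_parameter

prob = svm_problem(y.flatten().tolist(), x.tolist())
param = svm_parameter()
# "-t 0" selects a linear kernel and "-c 1" sets the soft-margin cost C;
# libsvm() forwards this string to param.parse_options(par_s).
libsvm(prob, param, "-t 0 -c 1", "linear", flag_x=True, conf=True)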
Example #4
def conf_matrix(p):
    # Globals test_data (rows whose first field is the true label) and confmat
    # are assumed to be defined elsewhere; confusion_matrix is assumed to come
    # from sklearn.metrics. Predictions p are 0-based and shifted to 1-based labels.
    cnt = len(test_data)
    actual = [int(test_data[i][0]) for i in range(cnt)]
    pred = [int(p[i] + 1) for i in range(cnt)]
    con_mat = confusion_matrix(actual, pred)
    print("Confusion matrix on test set is:")
    print(con_mat)
    confmat.plot_confusion(con_mat, list(set(actual)),
                           "Naive Bayes Prediction on Test Data")