Example #1
0
def sampleTraining(X, Y, Xt, Yt=None):
    """Run the experimental models for side-by-side comparison.

    Fits a linear regression, a logistic regression, and an SGD
    classifier on (X, Y) and evaluates each against (Xt, Yt).
    Results are produced by the individual model functions; nothing
    is returned here.
    """
    # Weight class 0 more heavily than class 1 for the classifiers.
    weights = {0: 0.6, 1: 0.4}

    linearRegression(X, Y, Xt, Yt)
    logisticRegression(X, Y, Xt, Yt, weights)
    sgdClassify(X, Y, Xt, Yt, weights)
Example #2
0
def training(X, Y, Xt, filename):
    """Train the final models and write predictions for the test set.

    Fits linear regression, logistic regression, and an SGD classifier
    on (X, Y), predicts on Xt, and saves each model's output to
    "<filename>1.csv", "<filename>2.csv", and "<filename>3.csv".
    """
    # Weight class 0 more heavily than class 1 for the classifiers.
    class_weight = {0:0.6, 1:0.4}

    # Linear regression yields no class probabilities, so its
    # prediction column is reused in place of a probability column.
    YPredict1 = linearRegression(X, Y, Xt, None)
    YPredict2, YProb2 = logisticRegression(X, Y, Xt, None, class_weight)
    YPredict3, YProb3 = sgdClassify(X, Y, Xt, None, class_weight)

    util.savePred(YPredict1, YPredict1, "{0}1.csv".format(filename))
    util.savePred(YPredict2, YProb2, "{0}2.csv".format(filename))
    # BUG FIX: previously saved YProb2 (logistic-regression
    # probabilities) alongside the SGD predictions; use YProb3.
    util.savePred(YPredict3, YProb3, "{0}3.csv".format(filename))
Example #3
0
    ax.scatter(xcord2, ycord2, s=30, c='green')
    x = np.arange(-3.0, 3.0, 0.1)
    y = (-theta[0] - theta[1] * x) / theta[2] # z = w0 + w1x1 + w2x2

    ax.plot(x,y)
    plt.xlabel('X1')
    plt.ylabel('X2')
    plt.show()

def classifier(theta, x1, x2):
    """Return the sigmoid of the linear score for the point (x1, x2)."""
    score = theta[0] + theta[1] * x1 + theta[2] * x2
    return regression.sigmoid(score)

# Load the data set and fit logistic-regression weights two ways:
# batch gradient ascent and stochastic gradient ascent (sga1).
dataArr, labelArr = loadDataSet()
theta = regression.logisticRegression(dataArr, labelArr)
theta2 = regression.sga1(np.array(dataArr), labelArr)


print theta
# NOTE(review): the triple-quoted string below is a disabled block
# (plotting plus a per-sample classification report); it is never
# assigned and appears unterminated in this fragment — confirm the
# closing quotes exist further down in the full file.
"""
plotBestFit(theta2, dataArr, labelArr)

n = np.shape(dataArr)[0]
for i in range(n):
    output = classifier(theta, dataArr[i][1], dataArr[i][2])
    if(output >= 0.5):
        group = 1
    else:
        group = 0
    print dataArr[i][1], dataArr[i][2], output, "-->", group
Example #4
0
    y = (-theta[0] - theta[1] * x) / theta[2]  # z = w0 + w1x1 + w2x2

    ax.plot(x, y)
    plt.xlabel('X1')
    plt.ylabel('X2')
    plt.show()


def classifier(theta, x1, x2):
    """Apply the sigmoid to the linear score of the point (x1, x2)."""
    linear_score = theta[0] + theta[1] * x1 + theta[2] * x2
    return regression.sigmoid(linear_score)


# Load the data set and fit logistic-regression weights two ways:
# batch gradient ascent and stochastic gradient ascent (sga1).
dataArr, labelArr = loadDataSet()
theta = regression.logisticRegression(dataArr, labelArr)
theta2 = regression.sga1(np.array(dataArr), labelArr)

print theta
# Disabled block (string literal, never assigned): plotting plus a
# per-sample classification report for the trained model.
"""
plotBestFit(theta2, dataArr, labelArr)

n = np.shape(dataArr)[0]
for i in range(n):
    output = classifier(theta, dataArr[i][1], dataArr[i][2])
    if(output >= 0.5):
        group = 1
    else:
        group = 0
    print dataArr[i][1], dataArr[i][2], output, "-->", group
"""
	def __init__(self, rng, input, n_in, hidden_layers_sizes, n_out):
		"""Build a stack of sigmoid hidden layers topped by a
		logistic-regression output layer, collecting parameters and
		L1/L2 regularization terms for the whole network.

		:param rng: random number generator forwarded to each HiddenLayer
		:param input: symbolic input to the network
		:param n_in: dimensionality of the network input
		:param hidden_layers_sizes: list of hidden-layer widths, one per layer
		:param n_out: number of output units of the final layer

		NOTE(review): the original body mixed tabs and spaces (a
		TabError under Python 3); indentation is normalized here and
		the duplicated ``if i == 0`` tests are merged — behavior is
		otherwise unchanged.
		"""
		self.params = []
		self.sigmoid_layers = []
		self.n_layers = len(hidden_layers_sizes)

		for i in xrange(self.n_layers):
			# The first hidden layer reads the network input; each later
			# layer reads the previous layer's output.
			if i == 0:
				input_size = n_in
				layer_input = input
			else:
				input_size = hidden_layers_sizes[i - 1]
				layer_input = self.sigmoid_layers[-1].output

			sigmoid_layer = HiddenLayer(rng=rng,
			                            input=layer_input,
			                            n_in=input_size,
			                            n_out=hidden_layers_sizes[i],
			                            activation=T.nnet.sigmoid)
			self.sigmoid_layers.append(sigmoid_layer)
			self.params.extend(sigmoid_layer.params)

		# Output layer: logistic regression on the last hidden layer.
		self.logRegressionLayer = logisticRegression(
			input=self.sigmoid_layers[-1].output,
			n_in=hidden_layers_sizes[-1],
			n_out=n_out
		)

		# L1 term: sum of absolute weights across every layer.
		self.L1 = abs(self.logRegressionLayer.W).sum()
		for layer in self.sigmoid_layers:
			self.L1 += abs(layer.W).sum()

		# L2 term: sum of squared weights across every layer.
		# (The original wrapped the even power in abs(), which is a
		# no-op for real-valued weights and has been dropped.)
		self.L2_sqr = (self.logRegressionLayer.W ** 2).sum()
		for layer in self.sigmoid_layers:
			self.L2_sqr += (layer.W ** 2).sum()

		# Training cost and error metric are delegated to the output layer.
		self.negative_log_likelihood = (
			self.logRegressionLayer.negative_log_likelihood
		)
		self.errors = self.logRegressionLayer.mse
		self.params.extend(self.logRegressionLayer.params)