def test_stoc_grade_plot(self):
    """Train with stochastic gradient ascent (v0) and plot the decision boundary."""
    samples, labels = logRegres.loadDataSet()
    print("\n data_set == %s" % (samples))
    print("\n label_mat == %s" % (labels))
    fitted = logRegres.stocGradAscent0(array(samples), labels)
    print("\n weights == %s" % (fitted))
    logRegres.plotBestFit(fitted)
def test_grade_plot(self):
    """Train with batch gradient ascent and plot the fitted boundary."""
    samples, labels = logRegres.loadDataSet()
    print("\n data_set == %s" % (samples))
    print("\n label_mat == %s" % (labels))
    fitted = logRegres.gradAscent(samples, labels)
    print("\n weights == %s" % (fitted))
    # getA() converts the numpy matrix into a plain Python-side array
    logRegres.plotBestFit(fitted.getA())
def test_best_stoc_grade_plot(self):
    """Train with the improved stochastic gradient ascent and plot the boundary."""
    samples, labels = logRegres.loadDataSet()
    print("\n data_set == %s" % (samples))
    print("\n label_mat == %s" % (labels))
    # 200 passes over the data (the original comment claimed 150)
    fitted = logRegres.stocGradAscent1(array(samples), labels, 200)
    print("\n weights == %s" % (fitted))
    logRegres.plotBestFit(fitted)
#coding=utf-8
# Driver: batch gradient ascent on the bundled sample data, then the
# horse-colic multi-test.
import logRegres
from numpy import *

samples, labels = logRegres.loadDataSet()
ascent = logRegres.gradAscent(samples, labels)
print(ascent)
# getA() turns the numpy matrix into a plain array for the plotting helper
logRegres.plotBestFit(ascent.getA())
logRegres.multiTest()
#!/usr/bin/python # encoding: utf-8 ''' Created on Nov 28, 2015 @author: yanruibo ''' import logRegres import numpy as np if __name__ == '__main__': dataArr,labelMat = logRegres.loadDataSet() #weights = logRegres.gradAscent(dataArr, labelMat) weights = logRegres.stocGradAscent0(np.array(dataArr), labelMat) print weights #logRegres.plotBestFit(weights.getA()) logRegres.plotBestFit(weights)
def stocGradAscent1(numIter):
    """Run the improved stochastic gradient ascent and plot the fit.

    numIter -- iteration count forwarded to logRegres.stocGradAscent1.
    """
    dataArr, labelMat = logRegres.loadDataSet()
    # dropped the stray C-style semicolons; parenthesized print is
    # valid under both Python 2 and Python 3
    weights = logRegres.stocGradAscent1(array(dataArr), labelMat, numIter)
    print(weights)
    logRegres.plotBestFit(weights)
# Exercise each logRegres training routine in turn, reloading the module
# between experiments and plotting the boundary after each.
import logRegres

# batch gradient ascent
data, labels = logRegres.loadDataSet()
w_matrix = logRegres.gradAscent(data, labels)

from imp import reload
reload(logRegres)
logRegres.plotBestFit(w_matrix.getA())

# stochastic gradient ascent, v0
from numpy import *
reload(logRegres)
data, labels = logRegres.loadDataSet()
logRegres.plotBestFit(logRegres.stocGradAscent0(array(data), labels))

# improved stochastic gradient ascent
reload(logRegres)
data, labels = logRegres.loadDataSet()
logRegres.plotBestFit(logRegres.stocGradAscent1(array(data), labels))

# horse-colic classification test
reload(logRegres)
logRegres.multiTest()
# -*- coding: utf-8 -*-
# Run all three logRegres trainers, plotting each boundary, then the
# horse-colic multi-test driver.
from numpy import *
import logRegres

samples, targets = logRegres.loadDataSet()

batch_w = logRegres.gradAscent(samples, targets)
logRegres.plotBestFit(batch_w)

reload(logRegres)
logRegres.plotBestFit(logRegres.stocGradAscent0(array(samples), targets))
logRegres.plotBestFit(logRegres.stocGradAscent1(array(samples), targets))

import logRegres
logRegres.multiTest()
def main():
    """Load the sample set, fit weights by batch gradient ascent, and plot."""
    features, targets = logRegres.loadDataSet()
    fitted = logRegres.gradAscent(features, targets)
    logRegres.plotBestFit(fitted)
# -*- coding: utf-8 -*- ''' Created on 2016年2月26日 @author: nocml ''' from numpy import * import logRegres dataArr , labelMat = logRegres.loadDataSet() print dataArr # weights = logRegres.gradAscent(dataArr, labelMat) weights = logRegres.stocGradAscent1(array(dataArr), labelMat , 150) # print "weights:" print weights # weights = [9.90028796735921,1.4181704748685218, -1.3358509819647089 ] # weights = [10.373488441795256, 0.7810704644295239 , -1.5443579566870218 ] logRegres.plotBestFit(array(weights))
__author__ = 'sunbeansoft'

# Exercise each trainer in logRegres, plotting the boundary after each,
# then run the horse-colic multi-test.
import logRegres as lr
from numpy import *

features, classes = lr.loadDataSet()

batch_w = lr.gradAscent(features, classes)
lr.plotBestFit(batch_w.getA())

stoc_w = lr.stocGradAscent0(array(features), classes)
lr.plotBestFit(stoc_w)

stoc_w = lr.stocGradAscent1(array(features), classes)
lr.plotBestFit(stoc_w)

lr.multiTest()
import logRegres
from numpy import *

# Batch gradient ascent on the bundled sample set; getA() converts the
# resulting numpy matrix into a plain ndarray for printing/plotting.
features, labels = logRegres.loadDataSet()
fitted = logRegres.gradAscent(features, labels)
print(fitted.getA())
logRegres.plotBestFit(fitted.getA())
import logRegres """ dataArr, labelMat = logRegres.loadDataSet() weights = logRegres.gradAscent(dataArr, labelMat) from numpy import * logRegres.plotBestFit(weights) from numpy import * dataArr, labelMat = logRegres.loadDataSet() #weights = logRegres.stocGradAscent0(array(dataArr), labelMat) weights = logRegres.stocGradAscent1(array(dataArr), labelMat) logRegres.plotBestFit(weights) """ logRegres.multiTest()
def test_plotBestFit():
    """Fit weights with batch gradient ascent and draw the boundary."""
    points, classes = logRegres.loadDataSet()
    fit = logRegres.gradAscent(points, classes)
    # .getA() unwraps the numpy matrix into an ndarray for plotting
    logRegres.plotBestFit(fit.getA())
# Driver: batch gradient ascent followed by the improved stochastic
# variant; the v0 stochastic run is left disabled in the string literal.
import logRegres

dataArr, labelMat = logRegres.loadDataSet()
a = logRegres.gradAscent(dataArr, labelMat)
print a
from numpy import *
reload(logRegres)
# prints plotBestFit's return value (presumably None — verify in logRegres)
print logRegres.plotBestFit(a.getA())
'''
weights = logRegres.stocGradAscent0 (array(dataArr),labelMat)
print logRegres.plotBestFit(weights)
'''
weights = logRegres.stocGradAscent1(array(dataArr), labelMat)
print logRegres.plotBestFit(weights)
def run():
    """Train with improved stochastic gradient ascent and plot the boundary."""
    dataMat, labelMat = lr.loadDataSet()
    weights = lr.stocGradAscent1(dataMat, labelMat)
    # parenthesized print is valid under both Python 2 and Python 3
    print(weights)
    lr.plotBestFit(weights)
from numpy import *
import logRegres

# Stochastic gradient ascent (v0); the weights are wrapped into a
# transposed numpy matrix before being handed to the plotting helper.
samples, labels = logRegres.loadDataSet()
w = logRegres.stocGradAscent0(array(samples), labels)
logRegres.plotBestFit(matrix(w).transpose())
# Driver: only the horse-colic multi-test runs; earlier plotting
# experiments are disabled (hash-commented and inside a string literal).
import logRegres
from numpy import *
reload(logRegres)
#dataArr,labelMat=logRegres.loadDataSet()
#weights=logRegres.gradAscent(dataArr,labelMat)
#w=logRegres.stocGradAscent0(array(dataArr),labelMat)
'''
w=logRegres.stocGradAscent1(array(dataArr),labelMat,500)
print w
logRegres.plotBestFit(w)
'''
logRegres.multiTest()
def stocGradAscent1(numIter):
    """Run the improved stochastic gradient ascent and plot the fit.

    numIter -- iteration count forwarded to logRegres.stocGradAscent1.
    """
    dataArr, labelMat = logRegres.loadDataSet()
    weights = logRegres.stocGradAscent1(array(dataArr), labelMat, numIter)
    # parenthesized print keeps this valid on both Python 2 and 3
    print(weights)
    logRegres.plotBestFit(weights)
#!usr/bin/python
#coding:utf8
# Improved stochastic gradient ascent; this project's plotBestFit takes
# the data and labels alongside the weights.
import logRegres
from numpy import *

points, classes = logRegres.loadDataSet()
weights = logRegres.stocGradAscent1(array(points), classes)
logRegres.plotBestFit(points, classes, weights)
def gradAscent():
    """Fit weights by batch gradient ascent and plot the decision boundary."""
    dataArr, labelMat = logRegres.loadDataSet()
    weights = logRegres.gradAscent(dataArr, labelMat)
    # parenthesized print works under Python 2 and 3 alike
    print(weights)
    # getA() converts the numpy matrix to a plain ndarray for plotting
    logRegres.plotBestFit(weights.getA())
import logRegres
from numpy import *

# Run all three trainers in sequence (500 passes for the improved
# stochastic variant), plotting each, then the horse-colic test.
xs, ys = logRegres.loadDataSet()

batch = logRegres.gradAscent(xs, ys)
logRegres.plotBestFit(batch.getA())

logRegres.plotBestFit(logRegres.stocGradAscent0(array(xs), ys))
logRegres.plotBestFit(logRegres.stocGradAscent1(array(xs), ys, 500))

logRegres.multiTest()
#从文件夹中提取数据 dataArr, labelMat = logRegres.loadDataSet() #加载数据,存放在列表中 print "\n数据列表是:\n", dataArr #打印数据,测试读取是否异常 print "\n类列表是:\n", labelMat #用数据和标签 利用梯度上升算法计算 权重 weights = logRegres.gradAscent(dataArr, labelMat) #梯度上升算法计算最佳参数值 stocWeights = logRegres.stocGradAscent1(array(dataArr), labelMat, 500) #随机梯度上升算法计算最佳参数值 print "\n权重w0,w1,w2的值是:\n", weights ####################################### 第一个图:梯度上升算法的例子 ################################# #利用权重绘制直线 利用数据绘制点 print "\n第一个图:梯度上升算法的例子" #梯度上升算法:批量处理方法(一次性处理所有数) logRegres.plotBestFit( weights.getA()) # .getA()将矩阵转换成数组 因为数组可以很方便的任意读取其中的元素,矩阵不行 ####################################### 第二个图:随机梯度上升算法的例子 ################################# #随机梯度上升:在线学习方法(新样本来到时,对分类器进行增量式更新) print "第二个图:随机梯度上升算法的例子" # logRegres.plotBestFit(stocWeights) ######################################## 第三个例子:预测病马的死亡率 ################################# #病马死亡率预测 print "\n第三个例子:预测病马的死亡率" logRegres.multiTest() #程序运行结果: ''' 数据列表是: [[1.0, -0.017612, 14.053064], [1.0, -1.395634, 4.662541], [1.0, -0.752157, 6.53862], [1.0, -1.322371, 7.152853], [1.0, 0.423363, 11.054677], [1.0, 0.406704, 7.067335], [1.0, 0.667394, 12.741452], [1.0, -2.46015, 6.866805], [1.0, 0.569411, 9.548755], [1.0, -0.026632, 10.427743], [1.0, 0.850433, 6.920334], [1.0, 1.347183, 13.1755], [1.0, 1.176813, 3.16702], [1.0, -1.781871, 9.097953], [1.0, -0.566606, 5.749003], [1.0, 0.931635, 1.589505], [1.0, -0.024205, 6.151823], [1.0, -0.036453, 2.690988], [1.0, -0.196949, 0.444165], [1.0, 1.014459, 5.754399], [1.0, 1.985298, 3.230619], [1.0, -1.693453, -0.55754], [1.0, -0.576525, 11.778922], [1.0, -0.346811, -1.67873], [1.0, -2.124484, 2.672471], [1.0, 1.217916, 9.597015], [1.0, -0.733928, 9.098687], [1.0, -3.642001, -1.618087], [1.0, 0.315985, 3.523953], [1.0, 1.416614, 9.619232], [1.0, -0.386323, 3.989286], [1.0, 0.556921, 8.294984], [1.0, 1.224863, 11.58736], [1.0, -1.347803, -2.406051], [1.0, 1.196604, 4.951851], [1.0, 0.275221, 9.543647], [1.0, 0.470575, 9.332488], [1.0, -1.889567, 9.542662], [1.0, 
-1.527893, 12.150579], [1.0, -1.185247, 11.309318], [1.0, -0.445678, 3.297303], [1.0, 1.042222, 6.105155], [1.0, -0.618787, 10.320986], [1.0, 1.152083, 0.548467], [1.0, 0.828534, 2.676045], [1.0, -1.237728, 10.549033], [1.0, -0.683565, -2.166125], [1.0, 0.229456, 5.921938], [1.0, -0.959885, 11.555336], [1.0, 0.492911, 10.993324], [1.0, 0.184992, 8.721488], [1.0, -0.355715, 10.325976], [1.0, -0.397822, 8.058397], [1.0, 0.824839, 13.730343], [1.0, 1.507278, 5.027866], [1.0, 0.099671, 6.835839], [1.0, -0.344008, 10.717485], [1.0, 1.785928, 7.718645], [1.0, -0.918801, 11.560217], [1.0, -0.364009, 4.7473], [1.0, -0.841722, 4.119083], [1.0, 0.490426, 1.960539], [1.0, -0.007194, 9.075792], [1.0, 0.356107, 12.447863], [1.0, 0.342578, 12.281162], [1.0, -0.810823, -1.466018], [1.0, 2.530777, 6.476801], [1.0, 1.296683, 11.607559], [1.0, 0.475487, 12.040035], [1.0, -0.783277, 11.009725], [1.0, 0.074798, 11.02365], [1.0, -1.337472, 0.468339], [1.0, -0.102781, 13.763651], [1.0, -0.147324, 2.874846], [1.0, 0.518389, 9.887035], [1.0, 1.015399, 7.571882], [1.0, -1.658086, -0.027255], [1.0, 1.319944, 2.171228], [1.0, 2.056216, 5.019981], [1.0, -0.851633, 4.375691], [1.0, -1.510047, 6.061992], [1.0, -1.076637, -3.181888], [1.0, 1.821096, 10.28399], [1.0, 3.01015, 8.401766], [1.0, -1.099458, 1.688274], [1.0, -0.834872, -1.733869], [1.0, -0.846637, 3.849075], [1.0, 1.400102, 12.628781], [1.0, 1.752842, 5.468166], [1.0, 0.078557, 0.059736], [1.0, 0.089392, -0.7153], [1.0, 1.825662, 12.693808], [1.0, 0.197445, 9.744638], [1.0, 0.126117, 0.922311], [1.0, -0.679797, 1.22053], [1.0, 0.677983, 2.556666], [1.0, 0.761349, 10.693862], [1.0, -2.168791, 0.143632], [1.0, 1.38861, 9.341997], [1.0, 0.317029, 14.739025]] 类列表是: [0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 
0, 1, 1, 1, 0, 1, 0, 0]
import logRegres
from numpy import *

if __name__ == '__main__':
    # Compare all three trainers on the same data, plotting each boundary.
    samples, labels = logRegres.loadDataSet()

    batch_w = logRegres.gradAscent(samples, labels)
    logRegres.plotBestFit(batch_w.getA())

    logRegres.plotBestFit(logRegres.stocGradAscent0(array(samples), labels))
    logRegres.plotBestFit(logRegres.stocGradAscent1(array(samples), labels))
'''
Created on May 27, 2014

Logistic Regression Main Study

@author: Guodong Jin
'''
# Driver: batch gradient ascent for reference, then the instrumented
# stochastic variant stocGradAscent1_0, plotting its final weights.
import logRegres
from numpy import *

dataArr,labelMat = logRegres.loadDataSet()
print len(dataArr)
print labelMat
res_w = logRegres.gradAscent(dataArr, labelMat)
print res_w
# logRegres.plotBestFit(res_w.getA())
# l_w0/l_w1/l_w2 presumably trace each weight over the 100 passes —
# verify against stocGradAscent1_0's definition in logRegres
res_w, l_w0, l_w1, l_w2= logRegres.stocGradAscent1_0(array(dataArr), labelMat, 100)
logRegres.plotBestFit(res_w)
# disabled matplotlib inspection of the weight-history curves:
# import matplotlib.pyplot as plt
# x = range(len(l_w0))
# fig = plt.figure()
# ax = fig.add_subplot(111)
# print res_w
# ax.plot(x, array(l_w1))
# plt.show()
import logRegres

# Fit with gradAscend (this module's own spelling) and plot the result.
points, classes = logRegres.loadDataSet()
fit = logRegres.gradAscend(points, classes)
logRegres.plotBestFit(fit)
from numpy import *
import logRegres
import logRegresGo

# Batch gradient ascent on the sample data, then plot the boundary.
xs, ys = logRegres.loadDataSet()
fitted = logRegres.gradAscent(xs, ys)
logRegres.plotBestFit(fitted)
# author: zhumenger
import logRegres
from numpy import *

# Print the batch-ascent weights, then plot the stochastic-ascent fit.
samples, labels = logRegres.loadDataSet()
print(logRegres.gradAscent(samples, labels))
weights = logRegres.stocGradAscent1(array(samples), labels)
print(logRegres.plotBestFit(weights))