Machine Learning - Logistic Regression (Determining the Best Regression Coefficients)


I regret not studying probability theory properly; these formulas are a real struggle to read. What do I do? I'm panicking...

Dataset (testSet.txt)

Sigmoid function:
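As implemented in sigmoid() in the listing below, the function squashes any real-valued input z into the interval (0, 1), and its output is treated here as the probability of class 1:

\sigma(z) = \frac{1}{1 + e^{-z}}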

Full-batch gradient ascent:
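The vectorized update performed in gradAscent() below ascends the log-likelihood of the training labels. With X the m x n data matrix, y the m x 1 label vector, and alpha the step size, each iteration computes:

w \leftarrow w + \alpha \, X^{T}\bigl(y - \sigma(Xw)\bigr)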

import numpy as np
import matplotlib.pyplot as plt


"""Data-loading function"""
def loadDataSet():
    dataMat = []
    labelMat = []
    fr = open('testSet.txt')
    for line in fr.readlines():                                      # read the file line by line
        lineArr = line.strip().split()                               # split on whitespace
        dataMat.append([1.0, float(lineArr[0]), float(lineArr[1])])  # prepend a constant 1.0, then store the two features
        labelMat.append(int(lineArr[2]))                             # the last value on each line is the class label
    return dataMat, labelMat


"""Sigmoid function"""
def sigmoid(inX):
    return 1.0 / (1 + np.exp(-inX))


"""Gradient ascent"""
def gradAscent(dataMatIn, classLabels):
    dataMatrix = np.mat(dataMatIn)                  # convert to a NumPy matrix
    labelMat = np.mat(classLabels).transpose()      # convert to a NumPy matrix and transpose into a column vector
    m, n = np.shape(dataMatrix)                     # size of dataMatrix: m rows, n columns
    alpha = 0.001                                   # step size (learning rate), controls the update magnitude
    maxCycles = 500                                 # maximum number of iterations
    weights = np.ones((n, 1))                       # weights: an n x 1 column vector initialized to all ones
    for k in range(maxCycles):                      # iterate 500 times
        h = sigmoid(dataMatrix * weights)           # vectorized gradient ascent: predicted probabilities
        error = labelMat - h                        # difference between labels and predictions
        weights = weights + alpha * dataMatrix.transpose() * error
    return weights.getA()                           # return the weights as a plain ndarray


"""Plot the fitted decision boundary"""
def plotBestFit(weights):
    dataMat, labelMat = loadDataSet()
    dataArr = np.array(dataMat)
    n = np.shape(dataMat)[0]                        # number of samples
    xcord1 = []; ycord1 = []
    xcord2 = []; ycord2 = []
    for i in range(n):
        if int(labelMat[i]) == 1:                   # class 1: red points
            xcord1.append(dataArr[i, 1])
            ycord1.append(dataArr[i, 2])
        else:                                       # class 0: green points
            xcord2.append(dataArr[i, 1])
            ycord2.append(dataArr[i, 2])
    fig = plt.figure()                              # default figure size
    ax = fig.add_subplot(111)                       # single subplot
    ax.scatter(xcord1, ycord1, s=30, c='red', marker='s')
    ax.scatter(xcord2, ycord2, s=30, c='green')
    x = np.arange(-3.0, 3.0, 0.1)                   # x-axis values for the fitted line, step 0.1
    y = (-weights[0] - weights[1] * x) / weights[2] # fitted line: where w0 + w1*x1 + w2*x2 = 0
    ax.plot(x, y)
    plt.title('BestFit')
    plt.xlabel('X1')
    plt.ylabel('X2')
    plt.show()


if __name__ == '__main__':
    dataMat, labelMat = loadDataSet()
    weights = gradAscent(dataMat, labelMat)
    plotBestFit(weights)
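The listing stops at plotting, but the learned weights also define a decision rule: a point (x1, x2) belongs to class 1 when sigmoid(w0 + w1*x1 + w2*x2) > 0.5, which is exactly the line drawn by plotBestFit(). Below is a minimal sketch of such a classifier; the classify() helper and the sample coordinates are hypothetical additions, and the sketch assumes the functions from the listing above are in scope.

import numpy as np

def classify(weights, x1, x2):
    # Hypothetical helper (not in the original post): evaluates
    # sigmoid(w0 + w1*x1 + w2*x2) and thresholds at 0.5, the same
    # boundary that plotBestFit() draws as a straight line.
    w = np.asarray(weights).flatten()       # gradAscent() returns an (n, 1) ndarray
    z = w[0] + w[1] * x1 + w[2] * x2
    prob = 1.0 / (1 + np.exp(-z))
    return 1 if prob > 0.5 else 0

if __name__ == '__main__':
    dataMat, labelMat = loadDataSet()       # functions from the listing above
    weights = gradAscent(dataMat, labelMat)
    print(classify(weights, 1.0, 8.0))      # example coordinates, chosen arbitrarily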

