Machine Learning (Andrew Ng): Programming Assignment ex1 in Python


I had been watching the lecture videos on and off and kept meaning to find time to code the assignments. Once I found the exercise files, I first worked through them in Octave, then spent a few hours redoing the first part of Andrew Ng's machine learning assignment ex1 in Python. I learned a fair amount in the process and am sharing the result here.

The code is as follows. main.py:

    import numpy as np
    import matplotlib.pyplot as plt
    from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection on older matplotlib

    import function

    # Read the data; the file is comma-separated, so specify the delimiter
    filename = 'ex1data1.txt'
    data = np.loadtxt(filename, delimiter=',')

    # Assign the columns to X (population) and Y (profit)
    X = data[:, 0]
    Y = data[:, 1]

    # Number of training examples
    m = len(Y)

    # Plot the raw data
    plt.plot(X, Y, 'rx')
    plt.xlabel('Population of City in 10,000s')
    plt.ylabel('Profit in $10,000s')

    # Prepend a column of ones to X for the intercept term
    X = np.c_[np.ones([len(X), 1]), X]
    print(X)

    # Initialize the parameters
    theta = np.zeros(2)
    alpha = 0.01
    iteras = 1500

    # Cost at the initial theta
    cost_result = function.compute_cost(X, Y, theta)
    print(cost_result)

    # Run gradient descent to update theta
    theta = function.update_grad(X, Y, theta, alpha, iteras)
    print(theta)

    # Plot the fitted line on top of the scatter plot
    plt.plot(X[:, 1], X @ theta, '-')

    # Predict the profit for a city of 35,000 people
    predict1 = np.dot([1, 3.5], theta)
    print('Predicted profit:', predict1 * 10000)

    # Visualize the cost over a grid of (theta0, theta1) values
    theta0_vals = np.linspace(-10, 10, 100)
    theta1_vals = np.linspace(-1, 4, 100)
    J_vals = np.zeros([len(theta0_vals), len(theta1_vals)])
    for i in range(len(theta0_vals)):
        for j in range(len(theta1_vals)):
            t = [theta0_vals[i], theta1_vals[j]]
            J_vals[i, j] = function.compute_cost(X, Y, t)
    J_vals = J_vals.T

    # Surface plot; plot_surface needs 2-D coordinate grids, hence meshgrid
    T0, T1 = np.meshgrid(theta0_vals, theta1_vals)
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.plot_surface(T0, T1, J_vals, cmap='rainbow')
    plt.title('Surface plot of the cost')
    plt.xlabel('theta0_vals')
    plt.ylabel('theta1_vals')
    plt.show()

    # Contour plot of the cost with the learned theta marked
    plt.figure()
    plt.contourf(theta0_vals, theta1_vals, J_vals, 10)
    plt.contour(theta0_vals, theta1_vals, J_vals, colors='black')
    plt.plot(theta[0], theta[1], 'rx', markersize=20, linewidth=1)
    plt.title('Contour plot of the cost')
    plt.xlabel('theta0')
    plt.ylabel('theta1')
    plt.show()
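One optional tweak that is not in the script above: with linearly spaced levels, most contour lines bunch up far from the minimum, so the bowl shape around the optimum is hard to see. The Octave version of this exercise draws the contours at logarithmically spaced levels instead. A minimal sketch of the same idea, meant as a drop-in replacement for the contour section of main.py (it reuses theta0_vals, theta1_vals, J_vals and theta from above, and the exact range np.logspace(-2, 3, 20) is an assumption to tune):

    # Drop-in replacement for the contour section of main.py
    plt.figure()
    plt.contour(theta0_vals, theta1_vals, J_vals,
                levels=np.logspace(-2, 3, 20), colors='black')
    plt.plot(theta[0], theta[1], 'rx', markersize=20, linewidth=1)
    plt.xlabel('theta0')
    plt.ylabel('theta1')
    plt.show()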

function.py:

    import numpy as np


    def compute_cost(X, Y, theta):
        """Cost function for linear regression."""
        m = len(Y)
        H = np.dot(X, theta)                # hypothesis for every training example
        J = np.sum((H - Y) ** 2) / (2 * m)
        return J


    def update_grad(X, Y, theta, alpha, iteras):
        """Batch gradient descent for `iteras` iterations."""
        m = len(Y)
        J_history = np.zeros([iteras, 1])
        for i in range(iteras):
            theta = theta - (alpha / m) * (X.T @ (X @ theta - Y))
            J_history[i] = compute_cost(X, Y, theta)  # record the cost to check convergence
        return theta
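For reference, compute_cost and update_grad implement the standard univariate linear-regression cost and the vectorized batch gradient-descent update:

    J(\theta) = \frac{1}{2m} \sum_{i=1}^{m} \bigl( h_\theta(x^{(i)}) - y^{(i)} \bigr)^2,
    \qquad h_\theta(x^{(i)}) = \theta_0 + \theta_1 x^{(i)}

    \theta := \theta - \frac{\alpha}{m} X^{T} (X\theta - y)

The gradient term is exactly the expression `X.T @ (X @ theta - Y)` scaled by `alpha / m` in the code.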

Results: Figure 1: the training data with the fitted line. Figure 2: surface plot of the cost. Figure 3: contour plot of the cost with the learned theta marked.
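As an extra sanity check that is not part of the assignment, the theta found by gradient descent can be compared with the closed-form least-squares (normal equation) solution. A minimal sketch, assuming ex1data1.txt is in the working directory (theta_closed is just a name chosen here); the two results should be close but not identical, since 1500 iterations with alpha = 0.01 only approximately converge:

    import numpy as np

    data = np.loadtxt('ex1data1.txt', delimiter=',')
    X = np.c_[np.ones(len(data)), data[:, 0]]   # add the intercept column
    Y = data[:, 1]

    # Normal equation: theta = (X^T X)^(-1) X^T y; pinv for numerical robustness
    theta_closed = np.linalg.pinv(X.T @ X) @ X.T @ Y
    print('Closed-form theta:', theta_closed)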
