One-dimensional gradient descent

The script below minimizes f(x) = x**2 + 5*x + 4 by gradient descent, estimating the derivative with a forward difference and plotting the iterates on the curve.
import numpy as np
import matplotlib.pyplot as plt

def f(x):
    return x**2 + 5*x + 4

def gradient(x):
    # Forward-difference approximation of f'(x)
    delta = 1e-4
    return (f(x + delta) - f(x)) / delta

theta = 10        # starting point
x_theta = []      # record of the iterates
alpha = 0.1       # learning rate
while abs(gradient(theta)) > 0.001:
    x_theta.append(theta)
    theta = theta - alpha * gradient(theta)

# Plot the curve and the descent path
plt.figure(num=1)
plot_x = np.linspace(-15, 15, 1000)
plot_y = f(plot_x)
plt.plot(plot_x, plot_y)
x_theta = np.array(x_theta)
y_theta = f(x_theta)
plt.scatter(x_theta, y_theta, s=30, c='red', alpha=0.6)
plt.show()
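Since f'(x) = 2*x + 5 in closed form, the minimum lies at x = -2.5. As a quick sanity check, here is a minimal sketch (assuming f, gradient, and theta from the script above are still in scope) that compares the numerical estimate against the exact derivative and prints the final iterate:

# Exact derivative of f(x) = x**2 + 5*x + 4 is 2*x + 5, zero at x = -2.5
for x0 in (-10.0, 0.0, 10.0):
    exact = 2 * x0 + 5
    approx = gradient(x0)        # forward difference from the script above
    print(f"x = {x0:6.1f}   exact = {exact:8.4f}   numeric = {approx:8.4f}")
print("final theta:", theta)     # should be close to -2.5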
Two-dimensional gradient descent algorithm

The same idea extends to two variables: each component of the gradient is estimated with a forward difference, and the descent path is drawn both on the 3D surface and on a contour map.
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # registers the 3d projection on older matplotlib

def f(x):
    return 0.2 * (x[0] + x[1]) ** 2 - 0.3 * x[0] * x[1] + 0.4

def gradient(x):
    # Forward-difference approximation of the gradient, one component at a time
    delta = 1e-4
    temp_f = f(x)
    grad = np.zeros_like(x)
    for idx in range(x.size):
        tmp_val = x[idx]
        x[idx] = tmp_val + delta      # perturb one coordinate
        grad[idx] = (f(x) - temp_f) / delta
        x[idx] = tmp_val              # restore it
    return grad

x = np.array([10.0, 13.0])  # float dtype, so the in-place perturbation is not truncated
x_theta = []
y_theta = []
alpha = 0.1
num = 0
step_num = 1000

def is_iterate(x):
    # Keep iterating while the squared gradient norm is still large
    temp = gradient(x)
    return temp[0]**2 + temp[1]**2 >= 0.001

# Iterate until the step budget is exhausted or the gradient is small
while num <= step_num and is_iterate(x):
    x_theta.append(x)
    y_theta.append(f(x))
    grad = gradient(x)
    x = x - alpha * grad
    num += 1

x_theta = np.array(x_theta)
y_theta = np.array(y_theta)

# Surface plot with the descent path
x1 = np.linspace(-15, 15, 500)
x2 = np.linspace(-15, 15, 500)
X, Y = np.meshgrid(x1, x2)
fig = plt.figure(num=1)
ax = fig.add_subplot(projection='3d')  # Axes3D(fig) is deprecated in recent matplotlib
ax.plot_surface(X, Y, f([X, Y]), rstride=1, cstride=1, cmap=plt.get_cmap('rainbow'))
ax.plot(x_theta[:, 0], x_theta[:, 1], y_theta, 'bo--')

# Contour plot with the descent path
plt.figure(num=2)
C = plt.contour(X, Y, f([X, Y]), 25, linewidths=1, colors='k')
plt.scatter(x_theta[:, 0], x_theta[:, 1], s=5, c='r', alpha=0.5)
plt.colorbar(C)
plt.title("Contour plot and gradient descent path")
plt.show()
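The gradient here also has a closed form: df/dx0 = 0.4*(x0 + x1) - 0.3*x1 and df/dx1 = 0.4*(x0 + x1) - 0.3*x0, which vanish only at the origin, where f = 0.4. A minimal sketch (reusing gradient, x_theta, and y_theta from above; the test point p is an arbitrary choice) checks the numerical gradient against the analytic one and looks at where the path ends:

p = np.array([3.0, -2.0])                        # arbitrary test point
analytic = np.array([0.4 * (p[0] + p[1]) - 0.3 * p[1],
                     0.4 * (p[0] + p[1]) - 0.3 * p[0]])
print("numeric :", gradient(p))
print("analytic:", analytic)
print("last iterate:", x_theta[-1], " f =", y_theta[-1])  # should be near (0, 0) and 0.4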