import numpy as np
import scipy.optimize as opt


def rosenbrock(x):
    """Rosenbrock function: f(x1, x2) = 100*(x2 - x1**2)**2 + (1 - x1)**2."""
    x1, x2 = x
    return 100*(x2 - x1**2)**2 + (1 - x1)**2


def grad_rosenbrock(x):
    """Analytic gradient of the Rosenbrock function."""
    x1, x2 = x
    dfdx1 = -400*x1*(x2 - x1**2) - 2*(1 - x1)
    dfdx2 = 200*(x2 - x1**2)
    return np.array([dfdx1, dfdx2])
# Minimize with scipy.optimize.minimize (BFGS by default for unconstrained
# problems), supplying the analytic gradient via jac=.
x0 = np.array([0.0, 0.0])
res = opt.minimize(rosenbrock, x0, jac=grad_rosenbrock)
print(res.x)    # minimizer, close to [1, 1]
print(res.fun)  # minimum value, close to 0
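

# Optional sanity check (a minimal sketch, not part of the original script):
# compare the analytic gradient against SciPy's finite-difference
# approximation at the starting point. A result near 0 indicates the
# hand-derived gradient is consistent with the objective.
print(opt.check_grad(rosenbrock, grad_rosenbrock, np.array([0.0, 0.0])))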
def gradient_descent(f, grad_f, x0, alpha=0.001, tol=1e-6, max_iter=10000):
    """Fixed-step gradient descent.

    Stops when the update step is smaller than tol or after max_iter
    iterations. f is kept for a uniform interface but is not used.
    """
    x = x0
    for i in range(max_iter):
        x_new = x - alpha*grad_f(x)
        if np.linalg.norm(x_new - x) < tol:
            break
        x = x_new
    return x
x0 = np.array([0.0, 0.0])
x = gradient_descent(rosenbrock, grad_rosenbrock, x0)
print(x)  # gradient-descent estimate of the minimizer
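
# For reference: the objective value at the gradient-descent solution.
# Plain gradient descent with a small fixed step converges slowly along
# Rosenbrock's curved valley, so with these settings it may stop well
# short of the true minimum at (1, 1) found by the SciPy run above.
print(rosenbrock(x))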