import scipy.optimize as opt
import numpy as np

def rosenbrock(x):
    # Rosenbrock function (a=1, b=100); global minimum at x = (1, 1)
    x1, x2 = x
    return 100*(x2 - x1**2)**2 + (1 - x1)**2

def grad_rosenbrock(x):
    # Analytic gradient of the Rosenbrock function
    x1, x2 = x
    dfdx1 = -400*x1*(x2 - x1**2) - 2*(1 - x1)
    dfdx2 = 200*(x2 - x1**2)
    return np.array([dfdx1, dfdx2])

# Minimize with SciPy, supplying the analytic gradient
# (BFGS is the default method for an unconstrained problem with a gradient)
x0 = np.array([0.0, 0.0])
res = opt.minimize(rosenbrock, x0, jac=grad_rosenbrock)
print(res.x)
print(res.fun)
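
# Optional sanity check (not in the original paste): scipy.optimize.check_grad
# compares the analytic gradient with a finite-difference approximation at x0
# and returns the norm of the difference, which should be near zero.
print(opt.check_grad(rosenbrock, grad_rosenbrock, x0))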

def gradient_descent(f, grad_f, x0, alpha=0.001, tol=1e-6, max_iter=10000):
    # Fixed-step gradient descent; f is kept in the signature for a uniform
    # interface, but only its gradient is used in the update.
    x = x0
    for _ in range(max_iter):
        x_new = x - alpha*grad_f(x)
        # Stop once the update step falls below the tolerance
        if np.linalg.norm(x_new - x) < tol:
            return x_new
        x = x_new
    return x

x0 = np.array([0.0, 0.0])
x = gradient_descent(rosenbrock, grad_rosenbrock, x0)
print(x)
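
# For comparison with the SciPy result above, evaluate the objective at the
# gradient-descent solution (added for illustration).
print(rosenbrock(x))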