Untitled

 avatar
unknown
python
5 months ago
708 B
17
Indexable
def gradient_descent(X, y, learning_rate=0.01, iterations=1000):
    """Fit linear-regression weights to (X, y) via batch gradient descent.

    Minimizes the half mean-squared error J(theta) = 1/(2m) * ||X@theta - y||^2
    with full-batch updates.

    Parameters
    ----------
    X : ndarray of shape (num_samples, num_features)
        Design matrix (include a column of ones yourself if you want a bias).
    y : array-like of shape (num_samples,) or (num_samples, 1)
        Target values. A 1-D array is accepted and reshaped internally.
    learning_rate : float, default 0.01
        Step size for each update.
    iterations : int, default 1000
        Number of full-batch update steps.

    Returns
    -------
    theta : ndarray of shape (num_features, 1)
        Learned parameter vector.
    cost_history : list of float
        Half-MSE cost recorded after each iteration (length == iterations).
    """
    num_samples, num_features = X.shape

    # BUG FIX: force y into a column vector. A 1-D y of shape (n,) would
    # broadcast against the (n, 1) predictions into an (n, n) error matrix,
    # silently corrupting both the gradient and the cost.
    y = np.asarray(y).reshape(-1, 1)

    theta = np.zeros((num_features, 1))  # start from the zero vector
    cost_history = []

    for _ in range(iterations):
        # Forward pass: current model predictions, shape (num_samples, 1).
        predictions = X.dot(theta)

        # Residuals between predictions and targets.
        error = predictions - y

        # Gradient of the half-MSE cost w.r.t. theta.
        gradients = (1 / num_samples) * X.T.dot(error)

        # Full-batch parameter update.
        theta -= learning_rate * gradients

        # Record the half-MSE cost so callers can monitor convergence.
        cost = (1 / (2 * num_samples)) * np.sum(error ** 2)
        cost_history.append(cost)

    return theta, cost_history
Editor is loading...
Leave a Comment