Untitled
unknown
python
a year ago
870 B
15
Indexable
def gradient_descent_with_regularization(X, y, learning_rate=0.01, iterations=1000, l1=0.0, l2=0.0):
    """Fit linear-regression weights by batch gradient descent with elastic-net penalty.

    Minimizes  (1/2n)*||X@theta - y||^2 + l1*||theta||_1 + (l2/2)*||theta||^2.

    Parameters
    ----------
    X : array-like of shape (num_samples, num_features)
        Design matrix (add a column of ones yourself if you want an intercept).
    y : array-like of shape (num_samples,) or (num_samples, 1)
        Targets; a flat vector is reshaped to a column so broadcasting cannot
        silently produce an (n, n) error matrix.
    learning_rate : float, step size for each update.
    iterations : int, number of full-batch gradient steps.
    l1, l2 : float, L1 (lasso) and L2 (ridge) penalty strengths.

    Returns
    -------
    theta : ndarray of shape (num_features, 1)
        Learned weights after `iterations` steps.
    cost_history : list of float
        Regularized cost evaluated at the *start* of each iteration, so each
        entry is consistent (error and penalty use the same theta).
    """
    X = np.asarray(X, dtype=float)
    # Coerce y to a column vector: a (n,) y against (n,1) predictions would
    # otherwise broadcast to an (n,n) matrix and corrupt the gradient.
    y = np.asarray(y, dtype=float).reshape(-1, 1)
    num_samples, num_features = X.shape
    theta = np.zeros((num_features, 1))  # start from the zero vector
    cost_history = []
    for _ in range(iterations):
        predictions = X.dot(theta)
        error = predictions - y
        # Record the cost BEFORE updating theta, so the data-fit term and the
        # penalty terms are evaluated at the same parameter vector. (The
        # original code computed the penalties with the already-updated theta.)
        cost = (1 / (2 * num_samples)) * np.sum(error ** 2) \
            + l1 * np.sum(np.abs(theta)) + (l2 / 2) * np.sum(theta ** 2)
        cost_history.append(cost)
        # Subgradient of the elastic-net objective; np.sign handles the
        # (non-differentiable) L1 term, using 0 at theta_j == 0.
        gradients = (1 / num_samples) * X.T.dot(error) + l1 * np.sign(theta) + l2 * theta
        theta -= learning_rate * gradients
    return theta, cost_history
Editor is loading...
Leave a Comment