# Single-neuron backpropagation demo: one feedforward pass, one weight update.
# (Replaced pastebin header metadata that made the file invalid Python.)
import numpy as np
# Sigmoid activation function
def sigmoid(x):
    """Logistic sigmoid activation: 1 / (1 + e^(-x))."""
    exp_neg = np.exp(-x)
    return 1.0 / (1.0 + exp_neg)
# Derivative of sigmoid function
def sigmoid_derivative(x):
    """Sigmoid derivative expressed in terms of the sigmoid's OUTPUT.

    If s = sigmoid(z), then ds/dz = s * (1 - s); the caller passes s, not z.
    """
    one_minus = 1.0 - x
    return x * one_minus
# --- Parameters ---
# One weight-update step for a single sigmoid neuron with two inputs.
alpha = 0.25  # Learning rate
input_pattern = np.array([0, 1])  # Input pattern
target_output = 1  # Target output
initial_weights = np.array([0.5, -0.3])  # Initial weights for simplicity

# Step 1: Feedforward pass
net_input = np.dot(initial_weights, input_pattern)  # net = w_1 * 0 + w_2 * 1 = w_2
output = sigmoid(net_input)  # Output of the neuron

# Step 2: Calculate error (target minus actual)
error = target_output - output

# Step 3: Backpropagate error and update weights.
# NOTE: sigmoid_derivative expects the sigmoid's OUTPUT, so passing `output`
# (not `net_input`) is correct here.
delta = error * sigmoid_derivative(output)  # Gradient for weight update
weight_update = alpha * delta * input_pattern  # input_pattern[0] == 0, so only w_2 moves

# Step 4: Update weights
new_weights = initial_weights + weight_update

# Output results
# (Fixed: the original last line had editor residue "Editor is loading..."
# fused onto it, which made the file a SyntaxError.)
print(f"Initial Output: {output}")
print(f"Error: {error}")
print(f"Updated Weights: {new_weights}")