import numpy as np
import tensorflow as tf
import tensorflow.keras.backend as K
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Activation
from tensorflow.keras.optimizers import SGD
import matplotlib.pyplot as plt
from tensorflow.keras.datasets import mnist

(x_train, y_train), (x_test, y_test) = mnist.load_data()

x_train = x_train.reshape(-1, 784).astype('float32') / 255
x_test = x_test.reshape(-1, 784).astype('float32') / 255
y_train = np.where(y_train < 5, 0, 1)
y_test = np.where(y_test < 5, 0, 1)
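
# Optional sanity check (a small sketch): the binarized labels should cover both
# classes (digits 0-4 -> 0, digits 5-9 -> 1) in roughly comparable proportions.
print('train shape:', x_train.shape, 'class counts:', np.bincount(y_train))
print('test shape: ', x_test.shape, 'class counts:', np.bincount(y_test))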

# Logistic (binary cross-entropy) loss applied to the raw logit z = x · w:
#   loss = log(1 + exp(-z)) + (1 - y) * z,
# an algebraic rearrangement of -[y*log(sigmoid(z)) + (1-y)*log(1-sigmoid(z))]
def logistic_loss(y_true, y_pred):
    y_true = K.cast(y_true, y_pred.dtype)  # labels arrive as integers; cast so the arithmetic below is well-typed
    loss = K.log(1 + K.exp(-y_pred)) + (1 - y_true) * y_pred
    return K.mean(loss)
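
# Optional sanity check (a sketch with made-up values): on raw logits, the custom loss
# above should agree with TensorFlow's built-in sigmoid cross-entropy, since both compute
# the same binary cross-entropy on logits.
_y = tf.constant([[0.0], [1.0], [1.0], [0.0]])
_z = tf.constant([[-2.0], [3.0], [-0.5], [1.5]])
_custom = logistic_loss(_y, _z)
_builtin = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=_y, logits=_z))
print('custom loss:', float(_custom), 'built-in loss:', float(_builtin))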

# Define the logistic regression model
model = Sequential()

# model.add(Dense(512, input_shape=(784,)))
# model.add(Activation('relu'))
# model.add(Dropout(0.2))
model.add(Dense(1, input_shape=(784,), activation='linear'))  # Linear output: the raw logit z = x · w (+ bias), no sigmoid applied here

# Compile the model with the custom logistic loss and SGD. Because the model outputs raw
# logits, accuracy is measured with a threshold of 0 (the logit decision boundary), not 0.5.
model.compile(optimizer=SGD(learning_rate=0.01), loss=logistic_loss,
              metrics=[tf.keras.metrics.BinaryAccuracy(threshold=0.0)])
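
# For reference, a minimal equivalent using Keras built-ins (assuming the goal is standard
# logistic regression on logits): BinaryCrossentropy(from_logits=True) computes the same
# loss as logistic_loss above, so the compile call could instead be:
# model.compile(optimizer=SGD(learning_rate=0.01),
#               loss=tf.keras.losses.BinaryCrossentropy(from_logits=True),
#               metrics=[tf.keras.metrics.BinaryAccuracy(threshold=0.0)])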

# Train the model; fit() returns a History object whose .history dict stores the per-epoch metrics
history = model.fit(x_train, y_train, epochs=100, batch_size=32, validation_data=(x_test, y_test))

# Plot the training and validation loss over epochs
plt.plot(history.history['loss'], label='Training Loss')
plt.plot(history.history['val_loss'], label='Validation Loss')
plt.title('Logistic Regression Loss Over Epochs')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
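
# A short evaluation sketch: since the model outputs raw logits, class predictions come
# from thresholding at 0 (equivalently, sigmoid(z) > 0.5).
logits = model.predict(x_test)
pred = (logits.reshape(-1) > 0).astype(int)
print('Test accuracy (logit threshold at 0):', np.mean(pred == y_test))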