import keras.losses
import numpy as np
import os
import tensorflow as tf
import tensorflow_datasets as tfds
import PIL
import PIL.Image
import matplotlib.pyplot as plt
import pathlib
from sklearn.model_selection import KFold
from tensorflow import keras
from keras.regularizers import l2
no_of_class = 40  # number of target classes (width of the final softmax layer)
def get_model(pretrained_model, dense_size, dropout, augment, fine_tuning=0):
    """Build and compile a classifier head on top of a pretrained backbone.

    Args:
        pretrained_model: Keras model used as feature extractor. NOTE: it is
            mutated in place (``trainable`` flags) and embedded in the
            returned model, so the same instance is shared with the caller.
        dense_size: Units in the hidden Dense layer.
        dropout: Dropout rate applied before the hidden Dense layer.
        augment: Accepted for interface compatibility but currently not acted
            on (reserved for an optional data-augmentation layer).
        fine_tuning: If > 0, unfreeze the last ``fine_tuning`` layers of the
            backbone; 0 keeps the entire backbone frozen.

    Returns:
        A compiled ``keras.Sequential`` with ``no_of_class`` softmax outputs,
        Adam(learning_rate=0.001) and sparse categorical cross-entropy loss.
    """
    # Freeze everything first, then selectively unfreeze the tail if requested.
    pretrained_model.trainable = False
    if fine_tuning > 0:
        for layer in pretrained_model.layers[-fine_tuning:]:
            layer.trainable = True

    model = keras.Sequential()
    model.add(pretrained_model)
    model.add(keras.layers.Flatten())
    model.add(keras.layers.Dropout(dropout))
    # L2 regularization on the hidden layer to limit overfitting.
    model.add(keras.layers.Dense(dense_size, activation='relu',
                                 kernel_regularizer=l2(0.002)))
    model.add(keras.layers.Dense(no_of_class, activation='softmax'))
    model.compile(optimizer=keras.optimizers.Adam(learning_rate=0.001),
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    return model
def load_data(data_dir="Mushrooms", image_size=None):
    """Load the entire image dataset as a single in-memory batch.

    Args:
        data_dir: Directory with one subdirectory per class
            (default ``"Mushrooms"``, matching the original behavior).
        image_size: Side length for square resizing; defaults to the
            module-level ``img_size`` when not given.

    Returns:
        A ``tf.data.Dataset`` yielding exactly one ``(images, labels)`` batch
        containing every image, unshuffled — so the K-fold indices computed
        by the caller map deterministically onto files on disk.
    """
    if image_size is None:
        image_size = img_size
    data_dir = pathlib.Path(data_dir)
    # Count all files so batch_size == dataset size: the whole dataset is
    # materialized as one batch for external K-fold splitting.
    image_count = len(list(data_dir.glob('*/*')))
    # validation_split=0.0 and seed were no-ops (no split is requested and
    # shuffle=False), so they are omitted; splitting happens via KFold.
    train_ds = tf.keras.utils.image_dataset_from_directory(
        data_dir,
        image_size=(image_size, image_size),
        batch_size=image_count,
        shuffle=False
    )
    return train_ds
# Training hyperparameters.
batch_size = 32  # NOTE(review): not passed to model.fit below; Keras' default batch size is also 32
epochs = 10
verbosity = 1
# Model parameters (alternative values tried are noted in the comments).
dense_size = 128 #256, 512]
img_size = 200
dropout = 0.3 # 0.1 0.5
augment = False  # accepted by get_model but currently not acted on
# Load every image as a single in-memory batch so KFold can index it directly.
train_ds = load_data()
images, labels = next(iter(train_ds))
# Define the K-fold cross-validator.
num_folds = 5
kfold = KFold(n_splits=num_folds, shuffle=True)
# K-fold cross-validation model evaluation.
fold_no = 1
# Pretrained ImageNet backbone, shared across all folds.
# NOTE(review): `classes` is presumably ignored when include_top=False — confirm against tf.keras docs.
Mobilenetv3Large = tf.keras.applications.MobileNetV3Large(include_top=False, input_shape=(img_size, img_size, 3), pooling='avg', classes=no_of_class, weights='imagenet')
for train, test in kfold.split(images, labels):
    # NOTE(review): the same backbone instance is reused in every fold; with
    # fine_tuning left at its default 0 it stays frozen (get_model sets
    # trainable=False), so folds do not leak into each other through backbone
    # weights — the Dense head is rebuilt fresh each fold.
    model = get_model(Mobilenetv3Large, dense_size, dropout, augment=augment)
    # Split the dataset into training and validation subsets for this fold.
    train_images, train_labels = tf.gather(images, indices=train), tf.gather(labels, indices=train)
    val_images, val_labels = tf.gather(images, indices=test), tf.gather(labels, indices=test)
    history = model.fit(
        train_images, train_labels,
        epochs=epochs,
        validation_data=(val_images, val_labels),
        verbose=verbosity
    )
    # Report loss and accuracy on the validation subset after training.
    val_loss, val_accuracy = model.evaluate(val_images, val_labels, verbose=0)
    print(f'Validation accuracy for fold {fold_no}: {val_accuracy}')
    print(f'Validation loss for fold {fold_no}: {val_loss}')
    fold_no += 1
epochs_range = range(epochs)  # presumably for plotting training curves later — unused in this chunk