Untitled

 avatar
unknown
plain_text
a year ago
3.3 kB
4
Indexable
import os
import numpy as np
import pandas as pd
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout
from tensorflow.keras.optimizers import Adam
from sklearn.metrics import mean_squared_error
from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping
from google.colab import drive


# Location of the competition data on disk.
base_path = '/mnt/data/task_data/task'

# Read the train/test index files that map image filenames to labels.
train_df = pd.read_csv(os.path.join(base_path, 'train_dataset.csv'))
test_df = pd.read_csv(os.path.join(base_path, 'test_dataset.csv'))

# Turn the bare filenames into absolute paths the image generators can open.
train_df['path'] = train_df['path'].map(lambda name: base_path + '/train_images/train_images/' + name)
test_df['path'] = test_df['path'].map(lambda name: base_path + '/test_images/test_images/' + name)

print(train_df['path'])
print(test_df['path'])

# Augmentation pipeline for training images: pixel values scaled to [0, 1]
# plus random geometric jitter applied on the fly.
augment_kwargs = dict(
    rescale=1./255,
    rotation_range=20,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.1,
    zoom_range=0.2,
    horizontal_flip=True,
    fill_mode='nearest',
    validation_split=0.2,  # Using 20% of training data for validation
)
train_datagen = ImageDataGenerator(**augment_kwargs)

# Test-time images only get rescaled — no augmentation at inference.
test_datagen = ImageDataGenerator(rescale=1./255)

# Both splits read 128x128 RGB images from the 'path' column and use the raw
# numeric 'num_shapes' column as the regression target (class_mode='raw').
flow_kwargs = dict(
    dataframe=train_df,
    x_col='path',
    y_col='num_shapes',
    target_size=(128, 128),
    batch_size=32,
    class_mode='raw',
)

# Training split (80% of the rows, per validation_split above).
train_generator = train_datagen.flow_from_dataframe(subset='training', **flow_kwargs)

# Validation split (remaining 20%).
validation_generator = train_datagen.flow_from_dataframe(subset='validation', **flow_kwargs)

# Small CNN regressor: two conv/pool stages, then a dense head ending in a
# single linear unit for the predicted shape count.
model = Sequential([
    Conv2D(32, (3, 3), activation='relu', input_shape=(128, 128, 3)),
    MaxPooling2D(2, 2),
    Conv2D(64, (3, 3), activation='relu'),
    MaxPooling2D(2, 2),
    Flatten(),
    Dense(128, activation='relu'),
    Dropout(0.5),  # regularize the dense head
    Dense(1, activation='linear')  # unbounded real output for regression
])

# FIX: the `lr` keyword is deprecated and was removed from Keras optimizers
# (TF >= 2.11); the supported name is `learning_rate`.
model.compile(optimizer=Adam(learning_rate=1e-4), loss='mse', metrics=['mse'])

# Callbacks: snapshot the best weights by validation loss, and stop early
# once val_loss has not improved for 10 epochs (restoring the best weights).
callbacks = [
    ModelCheckpoint('best_model.h5', save_best_only=True, monitor='val_loss', mode='min'),
    EarlyStopping(monitor='val_loss', patience=10, restore_best_weights=True),
]

# Train for up to 50 epochs; early stopping usually ends the run sooner.
history = model.fit(
    train_generator,
    epochs=50,
    validation_data=validation_generator,
    callbacks=callbacks)

# Reload the checkpointed best weights before predicting.
model.load_weights('best_model.h5')

# Inference-time generator: images only (y_col/class_mode are None) and
# shuffle=False so prediction order matches test_df row order.
test_generator = test_datagen.flow_from_dataframe(
    dataframe=test_df,
    x_col='path',
    y_col=None,
    target_size=(128, 128),
    batch_size=32,
    class_mode=None,
    shuffle=False)

# Predict a real-valued shape count per test image.
predictions = model.predict(test_generator)

# FIX: round to the nearest integer AND clamp at zero — a shape count can
# never be negative, but the unbounded linear output head can be.
target = np.clip(np.round(predictions.flatten()), 0, None).astype(int)

# Write the submission keyed by the original test IDs.
submission_df = pd.DataFrame({
    'ID': test_df['ID'],
    'target': target
})
submission_df.to_csv('submission.csv', index=False)
Editor is loading...
Leave a Comment