% Load data
load IRIS_IN.csv; % Input data
load IRIS_OUT.csv; % Target output
input = IRIS_IN;

% One-hot encoding for target output
numClasses = 3; % Number of classes
target = zeros(size(IRIS_OUT, 1), numClasses);
for i = 1:size(IRIS_OUT, 1)
    target(i, IRIS_OUT(i)) = 1; % Assuming IRIS_OUT has values 1, 2, 3
end
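
% An equivalent vectorized construction (a sketch, assuming the labels in
% IRIS_OUT are exactly 1..numClasses):
%   target = zeros(size(IRIS_OUT, 1), numClasses);
%   target(sub2ind(size(target), (1:size(IRIS_OUT, 1))', IRIS_OUT)) = 1;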

% Initialize weights
WeightIn = 2 * rand(4, 10) - 1;  % Weights from input to hidden layer
WeightHidden = 2 * rand(10, numClasses) - 1;  % Weights from hidden to output layer
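
% Note: rand gives a different initialization on every run; for a
% reproducible experiment one could seed the generator before the rand
% calls above, e.g. rng(0) (an arbitrary, assumed seed).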

% Parameters
epoch = 100;
trainDataSize = 75;
testDataSize = 75;

% Preallocate RMSE array
RMSE = zeros(epoch, 1);

for currentEpoch = 1:epoch
    Tot_Error = 0;
    for currentTrainDataIndex = 1:trainDataSize
        % Forward propagation
        Hidden = input(currentTrainDataIndex, :) * WeightIn;
        logsigHidden = logsig(Hidden);
        
        Output = logsigHidden * WeightHidden;
        % softmaxOutput = exp(Output) ./ sum(exp(Output), 2); % Softmax activation for multi-class output

        % Error calculation (squared error on the linear output; the
        % softmax path above is left commented out)
        DeltaOutput = target(currentTrainDataIndex, :) - Output;
        Tot_Error = Tot_Error + sum(DeltaOutput.^2); % Accumulate squared error
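
        % For reference: if the softmax above were enabled together with a
        % cross-entropy loss, the output delta would take the same form
        % (a standard result for softmax + cross-entropy):
        %   softmaxOutput = exp(Output) ./ sum(exp(Output), 2);
        %   DeltaOutput   = target(currentTrainDataIndex, :) - softmaxOutput;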

        % Backpropagation: the logsig derivative is a .* (1 - a).
        % (dlogsig expects two arguments, dlogsig(Hidden, logsigHidden), so
        % the derivative is written out explicitly here.)
        DeltaHidden = (DeltaOutput * WeightHidden') .* logsigHidden .* (1 - logsigHidden);
        WeightHidden = WeightHidden + 0.45 * logsigHidden' * DeltaOutput;

        % Update input weights
        WeightIn = WeightIn + 0.45 * input(currentTrainDataIndex, :)' * DeltaHidden;
    end
    
    % Calculate and store RMSE (root of the mean per-sample squared error)
    RMSE(currentEpoch) = sqrt(Tot_Error / trainDataSize);
    % fprintf('Epoch: %d, RMSE: %f\n', currentEpoch, RMSE(currentEpoch));
end

% Plot RMSE
figure;
plot(1:epoch, RMSE);
legend('Training');
ylabel('RMSE');
xlabel('Epoch');

% Testing phase
Tot_Correct = 0;
OutputList = zeros(testDataSize, numClasses); % Preallocate output list

for currentTestDataIndex = trainDataSize + 1:trainDataSize + testDataSize
    Hidden = input(currentTestDataIndex, :) * WeightIn;
    logsigHidden = logsig(Hidden);
    Output = logsigHidden * WeightHidden;
    OutputList(currentTestDataIndex - trainDataSize, :) = Output; 
    fprintf('data %d\n', currentTestDataIndex);
    %softmaxOutput = exp(Output) ./ sum(exp(Output), 2); % Softmax activation
    %OutputList(currentTestDataIndex - trainDataSize, :) = softmaxOutput;
    [~, predictedClass] = max(Output); % Get predicted class (index)
 
    if predictedClass == IRIS_OUT(currentTestDataIndex) % Compare with original class
        Tot_Correct = Tot_Correct + 1;
    end
end
OutputList % Display the raw network outputs for the test set
Tot_Percent = 100 * Tot_Correct / testDataSize;
fprintf('Test Correct Percentage: %.2f%%\n', Tot_Percent);
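
% The whole test pass can also be written as one vectorized forward pass
% (a sketch, assuming the same 75/75 train/test split used above):
%   testIdx    = (trainDataSize + 1 : trainDataSize + testDataSize)';
%   TestOutput = logsig(input(testIdx, :) * WeightIn) * WeightHidden;
%   [~, preds] = max(TestOutput, [], 2);
%   accuracy   = 100 * mean(preds == IRIS_OUT(testIdx));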