%% Comprehensive Multi-class SVM Comparison for the Leukemia Dataset
% Comparing OVO-SVM, K-SVCR, RSSVM, and the proposed improved RSSVM.
clear; clc; close all;
main();

%% Main Execution Function
function main()
    rng(42);  % One global seed for reproducibility
    fprintf('=== Comprehensive Multi-class SVM Comparison ===\n');
    fprintf('Dataset: leukemia.xlsx\n\n');

    % Load and preprocess data
    [X, y, feature_names, class_names] = load_leukemia_data();

    % Display dataset information
    fprintf('Dataset Information:\n');
    fprintf('  Samples: %d\n', size(X, 1));
    fprintf('  Features: %d\n', size(X, 2));
    fprintf('  Classes: %d\n', length(unique(y)));
    fprintf('  Class distribution:\n');
    for i = 1:length(class_names)
        fprintf('    %s: %d samples\n', class_names{i}, sum(y == i));
    end
    fprintf('\n');

    % Single train-test split evaluation
    fprintf('1. Train-Test Split Evaluation:\n');
    fprintf('==================================================\n');
    results = comprehensive_comparison(X, y);

    % Cross-validation evaluation
    fprintf('\n2. Cross-Validation Evaluation:\n');
    fprintf('==================================================\n');
    cv_results = cross_validation_comparison(X, y);

    % Display final summary (section 3)
    display_final_summary(results, cv_results);

    % Feature importance analysis for the proposed model
    fprintf('\n4. Feature Importance Analysis (Proposed RSSVM):\n');
    fprintf('==================================================\n');
    analyze_feature_importance(X, y, feature_names);
end

%% Load Leukemia Data from Excel File
function [X, y, feature_names, class_names] = load_leukemia_data()
    try
        fprintf('Loading leukemia.xlsx...\n');
        data = readtable('leukemia.xlsx');

        % Features are all columns except the last; the last column holds
        % the class labels.
        X = table2array(data(:, 1:end-1));
        y_raw = data{:, end};

        % grp2idx handles numeric, cell, and categorical labels alike and
        % always returns class indices 1..K plus the class names.
        [y, class_names] = grp2idx(y_raw);

        feature_names = data.Properties.VariableNames(1:end-1);
        fprintf('Leukemia dataset loaded successfully from leukemia.xlsx\n');
    catch ME
        fprintf('Error loading leukemia.xlsx: %s\n', ME.message);
        fprintf('Generating synthetic leukemia-like data...\n');
        [X, y, feature_names, class_names] = generate_synthetic_leukemia_data();
    end

    % Remove samples with NaN values before standardizing (zscore would
    % otherwise propagate NaNs across entire columns).
    nan_mask = any(isnan(X), 2) | isnan(y);
    if any(nan_mask)
        fprintf('Removing %d samples with NaN values\n', sum(nan_mask));
        X = X(~nan_mask, :);
        y = y(~nan_mask);
    end

    % Standardize features
    X = zscore(X);
end

%% Generate Synthetic Leukemia-like Data
function [X, y, feature_names, class_names] = generate_synthetic_leukemia_data()
    rng(42);  % For reproducibility

    % A typical leukemia microarray dataset has 72 samples and 7129 genes;
    % we generate only 50 synthetic features here to keep the demo fast.
    n_samples = 72;
    n_features = 50;

    X = randn(n_samples, n_features);

    % Create meaningful class separation:
    %   Class 1: ALL (Acute Lymphoblastic Leukemia)
    %   Class 2: AML (Acute Myeloid Leukemia)
    n_all = round(n_samples * 0.7);
    n_aml = n_samples - n_all;

    % Add class-specific expression patterns
    X(1:n_all, 1:10) = X(1:n_all, 1:10) + 1.5;            % Overexpressed in ALL
    X(n_all+1:end, 11:20) = X(n_all+1:end, 11:20) + 1.5;  % Overexpressed in AML

    % Create labels
    y = [ones(n_all, 1); 2*ones(n_aml, 1)];

    % Simulated gene names and class names
    feature_names = arrayfun(@(x) sprintf('Gene_%d', x), 1:n_features, 'UniformOutput', false);
    class_names = {'ALL', 'AML'};

    fprintf('Generated synthetic leukemia data: %d ALL, %d AML samples\n', n_all, n_aml);
end
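% Note on the expected spreadsheet layout (an assumption of
% load_leukemia_data above, not verified against any particular file):
% one row per sample, feature columns first, class label in the last
% column. A quick inspection sketch:
%
%   T = readtable('leukemia.xlsx');
%   disp(T(1:3, [1:3, end]));   % first rows: three features plus the label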
%% Comprehensive Comparison
function results = comprehensive_comparison(X, y)
    % Split data with stratification
    [X_train, X_test, y_train, y_test] = train_test_split_stratified(X, y, 0.3);

    % Initialize models with their tuned parameters
    models = struct();
    models(1).name = 'OVO-SVM';
    models(1).model = @() OVOSVM('C', 1.0, 'Kernel', 'linear');
    models(2).name = 'K-SVCR';
    models(2).model = @() KSVCR('C', 1.0, 'epsilon', 0.1);
    models(3).name = 'RSSVM';
    models(3).model = @() RSSVM('C', 1.0, 'gamma', 0.01);
    models(4).name = 'Proposed RSSVM';
    models(4).model = @() ProposedRSSVM('C', 1.0, 'gamma', 0.01, 'delta', 0.5, 'epsilon', 0.1);

    % Evaluate models
    results = struct();
    for i = 1:length(models)
        fprintf('\nEvaluating %s...\n', models(i).name);
        [accuracy, train_time, test_time, additional_metrics] = evaluate_model(...
            models(i).model, X_train, X_test, y_train, y_test, models(i).name);
        results(i).name = models(i).name;
        results(i).accuracy = accuracy;
        results(i).train_time = train_time;
        results(i).test_time = test_time;
        results(i).additional_metrics = additional_metrics;
    end

    % Plot results
    plot_comparison(results);

    % Plot accuracy stability across repeated splits
    plot_training_convergence(X, y);
end

%% Stratified Train-Test Split
function [X_train, X_test, y_train, y_test] = train_test_split_stratified(X, y, test_size)
    % Per-class random split so that train and test preserve the class
    % proportions. (The seed is set once in main rather than here, so
    % repeated calls produce different splits.)
    unique_classes = unique(y);
    train_indices = [];
    test_indices = [];

    for i = 1:length(unique_classes)
        class_idx = find(y == unique_classes(i));
        n_class = length(class_idx);
        n_test_class = round(test_size * n_class);

        % Random permutation of this class's indices
        class_idx = class_idx(randperm(n_class));
        test_indices = [test_indices; class_idx(1:n_test_class)];
        train_indices = [train_indices; class_idx(n_test_class+1:end)];
    end

    X_train = X(train_indices, :);
    X_test = X(test_indices, :);
    y_train = y(train_indices);
    y_test = y(test_indices);

    fprintf('Training set: %d samples\n', length(y_train));
    fprintf('Test set: %d samples\n', length(y_test));
end
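% For reference only: an equivalent stratified hold-out split can be done
% with cvpartition from the Statistics and Machine Learning Toolbox
% (assumed available; this sketch is not called by the pipeline).
% cvpartition stratifies automatically when given the class labels:
%
%   cv = cvpartition(y, 'HoldOut', 0.3);
%   X_train = X(training(cv), :);  y_train = y(training(cv));
%   X_test  = X(test(cv), :);      y_test  = y(test(cv));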
%% Enhanced Evaluate Model Function
function [accuracy, train_time, test_time, additional_metrics] = evaluate_model(...
        model_func, X_train, X_test, y_train, y_test, model_name)
    % Training
    train_start = tic;
    model = model_func();
    model = model.fit(X_train, y_train);
    train_time = toc(train_start);

    % Testing
    test_start = tic;
    y_pred = model.predict(X_test);
    test_time = toc(test_start);

    % Calculate metrics
    accuracy = sum(y_pred == y_test) / length(y_test);
    additional_metrics = calculate_additional_metrics(y_test, y_pred);

    fprintf('%s:\n', model_name);
    fprintf('  Accuracy: %.4f\n', accuracy);
    fprintf('  Precision: %.4f\n', additional_metrics.precision);
    fprintf('  Recall: %.4f\n', additional_metrics.recall);
    fprintf('  F1-Score: %.4f\n', additional_metrics.f1_score);
    fprintf('  Training time: %.4f s\n', train_time);
    fprintf('  Prediction time: %.4f s\n', test_time);
end

%% Calculate Additional Metrics
function metrics = calculate_additional_metrics(y_true, y_pred)
    % Macro-averaged precision, recall, and F1: each metric is computed per
    % class and then averaged, so every class counts equally regardless of
    % size. Per class c:
    %   precision_c = TP_c / (TP_c + FP_c)
    %   recall_c    = TP_c / (TP_c + FN_c)
    %   F1_c        = 2 * precision_c * recall_c / (precision_c + recall_c)
    % The eps terms guard against division by zero.
    unique_classes = unique(y_true);
    n_classes = length(unique_classes);

    precision = zeros(n_classes, 1);
    recall = zeros(n_classes, 1);
    f1_score = zeros(n_classes, 1);

    for i = 1:n_classes
        true_pos = sum((y_true == unique_classes(i)) & (y_pred == unique_classes(i)));
        false_pos = sum((y_true ~= unique_classes(i)) & (y_pred == unique_classes(i)));
        false_neg = sum((y_true == unique_classes(i)) & (y_pred ~= unique_classes(i)));

        precision(i) = true_pos / (true_pos + false_pos + eps);
        recall(i) = true_pos / (true_pos + false_neg + eps);
        f1_score(i) = 2 * (precision(i) * recall(i)) / (precision(i) + recall(i) + eps);
    end

    metrics.precision = mean(precision);
    metrics.recall = mean(recall);
    metrics.f1_score = mean(f1_score);
    metrics.confusion_matrix = confusionmat(y_true, y_pred);
end

%% Enhanced OVO-SVM Implementation
function ovo_svm = OVOSVM(varargin)
    p = inputParser;
    addParameter(p, 'C', 1.0);
    addParameter(p, 'Kernel', 'linear');
    addParameter(p, 'Gamma', 'auto');
    parse(p, varargin{:});

    ovo_svm = struct();
    ovo_svm.C = p.Results.C;
    ovo_svm.Kernel = p.Results.Kernel;
    ovo_svm.Gamma = p.Results.Gamma;
    ovo_svm.fit = @fit_ovo_svm;
    ovo_svm.predict = @predict_ovo_svm;
    ovo_svm.models = [];
    ovo_svm.classes = [];
    ovo_svm.class_pairs = [];
end

function model = fit_ovo_svm(model, X, y)
    % One-vs-one: train K(K-1)/2 binary SVMs, one per unordered class pair.
    model.classes = unique(y);
    n_classes = length(model.classes);
    n_pairs = n_classes * (n_classes - 1) / 2;
    model.models = cell(n_pairs, 1);
    model.class_pairs = zeros(n_pairs, 2);

    idx = 1;
    for i = 1:n_classes
        for j = i+1:n_classes
            class1 = model.classes(i);
            class2 = model.classes(j);

            % Restrict to samples of these two classes and relabel +1/-1
            % (relabeling against the original y avoids any clash between
            % raw label values and the new +1/-1 coding).
            mask = (y == class1) | (y == class2);
            X_binary = X(mask, :);
            y_binary = ones(sum(mask), 1);
            y_binary(y(mask) == class2) = -1;

            % Train the binary SVM with the specified kernel
            if strcmp(model.Kernel, 'linear')
                svm_model = fitcsvm(X_binary, y_binary, ...
                    'KernelFunction', 'linear', ...
                    'BoxConstraint', model.C, ...
                    'Standardize', false);
            else
                svm_model = fitcsvm(X_binary, y_binary, ...
                    'KernelFunction', 'rbf', ...
                    'BoxConstraint', model.C, ...
                    'KernelScale', model.Gamma, ...
                    'Standardize', false);
            end

            model.models{idx} = svm_model;
            model.class_pairs(idx, :) = [class1, class2];
            idx = idx + 1;
        end
    end
end
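% A vectorized alternative to the per-sample voting loop in predict_ovo_svm
% below (a sketch, not called by the pipeline; it assumes the same model
% struct that fit_ovo_svm produces and computes identical votes):
function y_pred = predict_ovo_svm_vectorized(model, X)
    votes = zeros(size(X, 1), length(model.classes));
    for i = 1:length(model.models)
        pred = predict(model.models{i}, X);   % +1 or -1 per test sample
        c1 = find(model.classes == model.class_pairs(i, 1));
        c2 = find(model.classes == model.class_pairs(i, 2));
        votes(pred == 1, c1) = votes(pred == 1, c1) + 1;
        votes(pred ~= 1, c2) = votes(pred ~= 1, c2) + 1;
    end
    [~, max_idx] = max(votes, [], 2);
    y_pred = model.classes(max_idx);
end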
function y_pred = predict_ovo_svm(model, X)
    % Majority voting: each pairwise classifier casts one vote per sample;
    % the class with the most votes wins.
    n_samples = size(X, 1);
    n_models = length(model.models);
    votes = zeros(n_samples, length(model.classes));

    for i = 1:n_models
        pred = predict(model.models{i}, X);
        class1 = model.class_pairs(i, 1);
        class2 = model.class_pairs(i, 2);
        for j = 1:n_samples
            if pred(j) == 1
                class_idx = find(model.classes == class1);
            else
                class_idx = find(model.classes == class2);
            end
            votes(j, class_idx) = votes(j, class_idx) + 1;
        end
    end

    [~, max_idx] = max(votes, [], 2);
    y_pred = model.classes(max_idx);
end

%% Enhanced K-SVCR Implementation
function ksvcr = KSVCR(varargin)
    p = inputParser;
    addParameter(p, 'C', 1.0);
    addParameter(p, 'epsilon', 0.1);
    parse(p, varargin{:});

    ksvcr = struct();
    ksvcr.C = p.Results.C;
    ksvcr.epsilon = p.Results.epsilon;
    ksvcr.fit = @fit_ksvcr;
    ksvcr.predict = @predict_ksvcr;
    ksvcr.models = [];
    ksvcr.classes = [];
end

function model = fit_ksvcr(model, X, y)
    % Simplified K-SVCR: one binary machine per class in a one-vs-rest
    % decomposition. (The full K-SVCR formulation also assigns the
    % remaining classes to an epsilon-insensitive zero label; that ternary
    % coding is not implemented in this simplified version.)
    model.classes = unique(y);
    n_classes = length(model.classes);
    model.models = cell(n_classes, 1);

    for i = 1:n_classes
        y_binary = ones(size(y));
        y_binary(y ~= model.classes(i)) = -1;
        model.models{i} = fit_ksvcr_binary(X, y_binary, model.C, model.epsilon);
    end
end

function y_pred = predict_ksvcr(model, X)
    n_samples = size(X, 1);
    n_classes = length(model.classes);
    decision_values = zeros(n_samples, n_classes);

    for i = 1:n_classes
        decision_values(:, i) = model.models{i}.decision_function(X);
    end

    [~, max_idx] = max(decision_values, [], 2);
    y_pred = model.classes(max_idx);
end

function model = fit_ksvcr_binary(X, y, C, epsilon)
    model = struct();
    model.C = C;
    model.epsilon = epsilon;
    model.X = X;
    model.y = y;

    n_samples = size(X, 1);

    % Linear kernel and standard SVM dual (see the note after this function)
    K = X * X';
    H = (y * y') .* K;
    f = -ones(n_samples, 1);
    Aeq = y';
    beq = 0;
    lb = zeros(n_samples, 1);
    ub = C * ones(n_samples, 1);

    options = optimoptions('quadprog', 'Display', 'off');
    alpha = quadprog(H, f, [], [], Aeq, beq, lb, ub, [], options);

    % Keep only the support vectors
    sv_idx = alpha > 1e-5;
    model.alpha = alpha(sv_idx);
    model.sv_X = X(sv_idx, :);
    model.sv_y = y(sv_idx);

    % Bias from the KKT conditions, averaged over the support vectors
    model.b = mean(model.sv_y - K(sv_idx, sv_idx) * (model.alpha .* model.sv_y));

    % Decision function returns one value per test row (a column vector)
    model.decision_function = @(X_test) ...
        (X_test * model.sv_X') * (model.alpha .* model.sv_y) + model.b;
end
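% For reference, the quadprog call in fit_ksvcr_binary solves the standard
% soft-margin SVM dual, written in the variables used above:
%
%   min_alpha  (1/2) * alpha' * H * alpha - sum(alpha)
%   s.t.       y' * alpha = 0,   0 <= alpha_i <= C,
%
% with H = (y*y') .* K and K = X*X'. The resulting decision function is
% f(x) = sum_i alpha_i * y_i * <x_i, x> + b, which is exactly what
% model.decision_function evaluates over the support vectors.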
%% Enhanced RSSVM Implementation
function rssvm = RSSVM(varargin)
    p = inputParser;
    addParameter(p, 'C', 1.0);
    addParameter(p, 'gamma', 0.1);
    parse(p, varargin{:});

    rssvm = struct();
    rssvm.C = p.Results.C;
    rssvm.gamma = p.Results.gamma;
    rssvm.fit = @fit_rssvm;
    rssvm.predict = @predict_rssvm;
    rssvm.vertices = [];
    rssvm.alpha = [];
    rssvm.X_train = [];
    rssvm.classes = [];
end

function model = fit_rssvm(model, X, y)
    model.classes = unique(y);
    n_classes = length(model.classes);
    n_samples = size(X, 1);

    % Encode each class as a simplex vertex in (K-1)-dimensional space
    model.vertices = create_simplex_vertices(n_classes);

    % Transform labels to simplex vertices
    Y = zeros(n_samples, n_classes - 1);
    for i = 1:n_samples
        class_idx = find(model.classes == y(i));
        Y(i, :) = model.vertices(class_idx, :);
    end

    % Compute the RBF kernel and solve for the dual coefficients
    K = rbf_kernel(X, X, model.gamma);
    model.alpha = solve_rssvm_optimization(K, Y, n_samples, n_classes - 1, model.C);
    model.X_train = X;
end

function y_pred = predict_rssvm(model, X)
    % Decode by nearest simplex vertex: the predicted class is the one
    % whose vertex is closest to the model output in Euclidean distance.
    K_test = rbf_kernel(X, model.X_train, model.gamma);
    outputs = K_test * model.alpha;

    n_samples = size(X, 1);
    y_pred = zeros(n_samples, 1);
    for i = 1:n_samples
        distances = arrayfun(@(j) norm(outputs(i, :) - model.vertices(j, :)), ...
            1:length(model.classes));
        [~, min_idx] = min(distances);
        y_pred(i) = model.classes(min_idx);
    end
end

function vertices = create_simplex_vertices(n_classes)
    % Simplex coding of K classes in K-1 dimensions: the first K-1 classes
    % map to the unit vectors e_1, ..., e_{K-1}; the last class maps to
    % -1/(K-1) * ones(1, K-1). For K = 3 this gives the vertices
    % [1 0], [0 1], and [-0.5 -0.5].
    vertices = zeros(n_classes, n_classes - 1);
    for i = 1:n_classes-1
        vertices(i, i) = 1.0;
    end
    vertices(n_classes, :) = -1/(n_classes-1) * ones(1, n_classes-1);
end

function K = rbf_kernel(X1, X2, gamma)
    % K(i, j) = exp(-gamma * ||x1_i - x2_j||^2)
    n1 = size(X1, 1);
    n2 = size(X2, 1);
    K = zeros(n1, n2);
    for i = 1:n1
        for j = 1:n2
            K(i, j) = exp(-gamma * norm(X1(i, :) - X2(j, :))^2);
        end
    end
end
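% A vectorized alternative to the double loop above (a sketch, not wired
% into the pipeline; assumes pdist2 from the Statistics and Machine
% Learning Toolbox is available). It computes the same matrix and is much
% faster once the sample count grows beyond a few hundred:
function K = rbf_kernel_vectorized(X1, X2, gamma)
    K = exp(-gamma * pdist2(X1, X2).^2);  % pairwise squared Euclidean distances
end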
function alpha = solve_rssvm_optimization(K, Y, n_samples, n_components, C)
    % One support vector regression per simplex coordinate, fit on the
    % precomputed kernel matrix. fitrsvm is used here because the simplex
    % targets are continuous (fitcsvm would reject them for K > 2).
    alpha = zeros(n_samples, n_components);
    for j = 1:n_components
        y_component = Y(:, j);
        svr_model = fitrsvm(K, y_component, 'KernelFunction', 'linear', ...
            'BoxConstraint', C, 'Standardize', false);
        alpha(svr_model.IsSupportVector, j) = svr_model.Alpha;
    end
end

%% Enhanced Proposed Improved RSSVM Implementation
function proposed_rssvm = ProposedRSSVM(varargin)
    p = inputParser;
    addParameter(p, 'C', 1.0);
    addParameter(p, 'gamma', 0.1);
    addParameter(p, 'delta', 1.0);
    addParameter(p, 'epsilon', 0.1);
    parse(p, varargin{:});

    proposed_rssvm = struct();
    proposed_rssvm.C = p.Results.C;
    proposed_rssvm.gamma = p.Results.gamma;
    proposed_rssvm.delta = p.Results.delta;
    proposed_rssvm.epsilon = p.Results.epsilon;
    proposed_rssvm.fit = @fit_proposed_rssvm;
    proposed_rssvm.predict = @predict_proposed_rssvm;
    proposed_rssvm.vertices = [];
    proposed_rssvm.alpha = [];
    proposed_rssvm.X_train = [];
    proposed_rssvm.X_mean = [];
    proposed_rssvm.p_i = [];
    proposed_rssvm.classes = [];
end

function model = fit_proposed_rssvm(model, X, y)
    model.classes = unique(y);
    n_classes = length(model.classes);
    n_samples = size(X, 1);

    % Mean-center the features (the mean is stored for prediction)
    model.X_mean = mean(X, 1);
    X_centered = X - model.X_mean;

    % Empirical class prior probabilities
    model.p_i = zeros(n_classes, 1);
    for i = 1:n_classes
        model.p_i(i) = sum(y == model.classes(i)) / n_samples;
    end

    % Simplex label coding, as in the baseline RSSVM
    model.vertices = create_simplex_vertices(n_classes);
    Y = zeros(n_samples, n_classes - 1);
    for i = 1:n_samples
        class_idx = find(model.classes == y(i));
        Y(i, :) = model.vertices(class_idx, :);
    end

    % Compute the RBF kernel on the mean-centered features
    K = rbf_kernel(X_centered, X_centered, model.gamma);

    % Solve the improved optimization problem
    model.alpha = solve_improved_optimization(K, Y, n_samples, n_classes - 1, ...
        model.C, model.delta, model.epsilon, model.p_i);
    model.X_train = X;
    model.X_centered_train = X_centered;
end

function y_pred = predict_proposed_rssvm(model, X)
    % Center test data with the training mean, then decode by nearest vertex
    X_centered = X - model.X_mean;
    K_test = rbf_kernel(X_centered, model.X_centered_train, model.gamma);
    outputs = K_test * model.alpha;

    n_samples = size(X, 1);
    y_pred = zeros(n_samples, 1);
    for i = 1:n_samples
        distances = arrayfun(@(j) norm(outputs(i, :) - model.vertices(j, :)), ...
            1:length(model.classes));
        [~, min_idx] = min(distances);
        y_pred(i) = model.classes(min_idx);
    end
end

function alpha = solve_improved_optimization(K, Y, n_samples, n_components, C, delta, epsilon, p_i)
    % As in solve_rssvm_optimization, one SVR per simplex coordinate, here
    % with epsilon entering as the insensitivity-tube width. The
    % probability-weighted bias term (delta - epsilon) * p_i + delta from
    % the proposed formulation is computed below but not yet wired into
    % this simplified solver.
    enhanced_bias = (delta - epsilon) * p_i + delta; %#ok<NASGU>

    alpha = zeros(n_samples, n_components);
    for j = 1:n_components
        y_component = Y(:, j);
        svr_model = fitrsvm(K, y_component, 'KernelFunction', 'linear', ...
            'BoxConstraint', C, 'Epsilon', epsilon, 'Standardize', false);
        alpha(svr_model.IsSupportVector, j) = svr_model.Alpha;
    end
end
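% Minimal standalone usage sketch for the proposed model (assumes X_train,
% y_train, and X_test already exist in the workspace; not executed by the
% pipeline):
%
%   mdl = ProposedRSSVM('C', 1.0, 'gamma', 0.01, 'delta', 0.5, 'epsilon', 0.1);
%   mdl = mdl.fit(X_train, y_train);
%   y_hat = mdl.predict(X_test);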
%% Cross-Validation Comparison
function cv_results = cross_validation_comparison(X, y)
    k = 5;
    % Stratified k-fold partition (cvpartition stratifies automatically
    % when given the class labels)
    cvp = cvpartition(y, 'KFold', k);

    models = {'OVO-SVM', 'K-SVCR', 'RSSVM', 'Proposed RSSVM'};
    cv_results = struct();

    for m = 1:length(models)
        accuracies = zeros(k, 1);
        fold_times = zeros(k, 1);

        for i = 1:k
            train_mask = training(cvp, i);
            test_mask = test(cvp, i);

            X_train = X(train_mask, :);
            X_test = X(test_mask, :);
            y_train = y(train_mask);
            y_test = y(test_mask);

            fold_start = tic;
            switch models{m}
                case 'OVO-SVM'
                    model = OVOSVM('C', 1.0, 'Kernel', 'linear');
                case 'K-SVCR'
                    model = KSVCR('C', 1.0, 'epsilon', 0.1);
                case 'RSSVM'
                    model = RSSVM('C', 1.0, 'gamma', 0.01);
                case 'Proposed RSSVM'
                    model = ProposedRSSVM('C', 1.0, 'gamma', 0.01, 'delta', 0.5, 'epsilon', 0.1);
            end

            model = model.fit(X_train, y_train);
            y_pred = model.predict(X_test);
            accuracies(i) = sum(y_pred == y_test) / length(y_test);
            fold_times(i) = toc(fold_start);
        end

        cv_results(m).name = models{m};
        cv_results(m).mean_accuracy = mean(accuracies);
        cv_results(m).std_accuracy = std(accuracies);
        cv_results(m).mean_time = mean(fold_times);
        cv_results(m).all_scores = accuracies;

        fprintf('%s CV Accuracy: %.4f (+/- %.4f), Mean Time: %.4f s\n', ...
            models{m}, mean(accuracies), std(accuracies), mean(fold_times));
    end
end

%% Enhanced Plot Comparison Results
function plot_comparison(results)
    figure('Position', [100, 100, 1400, 1000]);

    % Extract data
    model_names = {results.name};
    accuracies = [results.accuracy];
    train_times = [results.train_time];
    test_times = [results.test_time];

    % One color per model
    colors = [0.2, 0.6, 0.8; 0.8, 0.4, 0.2; 0.4, 0.8, 0.4; 0.8, 0.2, 0.8];

    % Accuracy comparison
    subplot(2, 3, 1);
    bars = bar(accuracies, 'FaceColor', 'flat');
    bars.CData = colors;   % bar returns one Bar object; CData colors each bar
    set(gca, 'XTickLabel', model_names, 'XTickLabelRotation', 45);
    title('Model Accuracy Comparison', 'FontSize', 12, 'FontWeight', 'bold');
    ylabel('Accuracy');
    ylim([0, 1]);
    grid on;
    for i = 1:length(accuracies)
        text(i, accuracies(i) + 0.02, sprintf('%.4f', accuracies(i)), ...
            'HorizontalAlignment', 'center', 'FontWeight', 'bold');
    end

    % Training time comparison
    subplot(2, 3, 2);
    bars = bar(train_times, 'FaceColor', 'flat');
    bars.CData = colors;
    set(gca, 'XTickLabel', model_names, 'XTickLabelRotation', 45);
    title('Training Time Comparison', 'FontSize', 12, 'FontWeight', 'bold');
    ylabel('Time (seconds)');
    grid on;
    for i = 1:length(train_times)
        text(i, train_times(i) + max(train_times)*0.05, sprintf('%.4f', train_times(i)), ...
            'HorizontalAlignment', 'center', 'FontWeight', 'bold');
    end

    % Prediction time comparison
    subplot(2, 3, 3);
    bars = bar(test_times, 'FaceColor', 'flat');
    bars.CData = colors;
    set(gca, 'XTickLabel', model_names, 'XTickLabelRotation', 45);
    title('Prediction Time Comparison', 'FontSize', 12, 'FontWeight', 'bold');
    ylabel('Time (seconds)');
    grid on;
    for i = 1:length(test_times)
        text(i, test_times(i) + max(test_times)*0.05, sprintf('%.4f', test_times(i)), ...
            'HorizontalAlignment', 'center', 'FontWeight', 'bold');
    end

    % Accuracy vs training time
    subplot(2, 3, 4);
    scatter(train_times, accuracies, 200, colors, 'filled');
    for i = 1:length(model_names)
        text(train_times(i), accuracies(i), model_names{i}, ...
            'HorizontalAlignment', 'center', 'VerticalAlignment', 'bottom');
    end
    xlabel('Training Time (s)');
    ylabel('Accuracy');
    title('Accuracy vs Training Time', 'FontSize', 12, 'FontWeight', 'bold');
    grid on;

    % Summary table
    subplot(2, 3, [5, 6]);
    axis off;
    summary_text = sprintf('Leukemia Dataset - Model Comparison Summary\n\n');
    for i = 1:length(results)
        summary_text = sprintf('%s%s:\n', summary_text, results(i).name);
        summary_text = sprintf('%s  Accuracy: %.4f\n', summary_text, results(i).accuracy);
        summary_text = sprintf('%s  Precision: %.4f\n', summary_text, results(i).additional_metrics.precision);
        summary_text = sprintf('%s  Recall: %.4f\n', summary_text, results(i).additional_metrics.recall);
        summary_text = sprintf('%s  F1-Score: %.4f\n', summary_text, results(i).additional_metrics.f1_score);
        summary_text = sprintf('%s  Train Time: %.4f s\n', summary_text, results(i).train_time);
        summary_text = sprintf('%s  Test Time: %.4f s\n\n', summary_text, results(i).test_time);
    end
    text(0.1, 0.9, summary_text, 'VerticalAlignment', 'top', ...
        'FontSize', 10, 'FontName', 'FixedWidth', 'FontWeight', 'bold');

    sgtitle('Multi-class SVM Models Comparison on Leukemia Dataset', ...
        'FontSize', 14, 'FontWeight', 'bold');

    % Save figure
    saveas(gcf, 'leukemia_model_comparison.png');
end
%% Plot Accuracy Stability Across Repeated Splits
function plot_training_convergence(X, y)
    figure('Position', [200, 200, 1000, 600]);

    % Use a subset for faster plotting on large datasets (the "test" side
    % of a 70% split, i.e. 70% of the samples)
    if size(X, 1) > 100
        [~, X_sub, ~, y_sub] = train_test_split_stratified(X, y, 0.7);
    else
        X_sub = X;
        y_sub = y;
    end

    models = {'RSSVM', 'Proposed RSSVM'};
    colors = [0.4, 0.8, 0.4; 0.8, 0.2, 0.8];

    for m = 1:length(models)
        switch models{m}
            case 'RSSVM'
                model = RSSVM('C', 1.0, 'gamma', 0.01);
            case 'Proposed RSSVM'
                model = ProposedRSSVM('C', 1.0, 'gamma', 0.01, 'delta', 0.5, 'epsilon', 0.1);
        end

        % Accuracy over five independent random splits (a stability check
        % rather than true optimizer convergence)
        accuracies = zeros(5, 1);
        for i = 1:5
            [X_train, X_test, y_train, y_test] = train_test_split_stratified(X_sub, y_sub, 0.3);
            model = model.fit(X_train, y_train);
            y_pred = model.predict(X_test);
            accuracies(i) = sum(y_pred == y_test) / length(y_test);
        end

        plot(1:5, accuracies, 'o-', 'Color', colors(m, :), 'LineWidth', 2, ...
            'MarkerSize', 8, 'DisplayName', models{m});
        hold on;
    end

    xlabel('Random split');
    ylabel('Accuracy');
    title('Accuracy Stability Across Random Splits', 'FontSize', 12, 'FontWeight', 'bold');
    legend('show', 'Location', 'southeast');
    grid on;

    saveas(gcf, 'training_convergence.png');
end

%% Feature Importance Analysis
function analyze_feature_importance(X, y, feature_names)
    % Train the proposed model on the full dataset
    model = ProposedRSSVM('C', 1.0, 'gamma', 0.01, 'delta', 0.5, 'epsilon', 0.1);
    model = model.fit(X, y);

    if size(model.alpha, 2) > 0
        % Heuristic importance via a linear surrogate: W = X' * alpha gives
        % per-feature weights for a linear kernel; under the RBF kernel this
        % is only a rough proxy. (model.alpha itself is indexed by training
        % sample, not by feature, so it cannot be ranked directly.)
        W = model.X_centered_train' * model.alpha;   % n_features x (K-1)
        feature_importance = mean(abs(W), 2);

        % Report the top features
        [~, top_indices] = sort(feature_importance, 'descend');
        n_top = min(10, length(feature_importance));

        fprintf('Top %d Most Important Features:\n', n_top);
        for i = 1:n_top
            idx = top_indices(i);
            if idx <= length(feature_names)
                fprintf('  %d. %s: %.4f\n', i, feature_names{idx}, feature_importance(idx));
            else
                fprintf('  %d. Feature_%d: %.4f\n', i, idx, feature_importance(idx));
            end
        end
    else
        fprintf('Feature importance analysis not available for this model configuration.\n');
    end
end
%% Display Final Summary
function display_final_summary(results, cv_results)
    fprintf('\n3. Final Summary:\n');
    fprintf('==================================================\n');

    for i = 1:length(results)
        fprintf('%s:\n', results(i).name);
        fprintf('  Single Split - Accuracy: %.4f, Precision: %.4f, Recall: %.4f, F1: %.4f\n', ...
            results(i).accuracy, results(i).additional_metrics.precision, ...
            results(i).additional_metrics.recall, results(i).additional_metrics.f1_score);

        % Find the matching cross-validation result
        cv_idx = find(strcmp({cv_results.name}, results(i).name));
        if ~isempty(cv_idx)
            fprintf('  Cross-Validation - Mean Accuracy: %.4f (+/- %.4f)\n', ...
                cv_results(cv_idx).mean_accuracy, cv_results(cv_idx).std_accuracy);
        end

        fprintf('  Training Time: %.4f s, Prediction Time: %.4f s\n', ...
            results(i).train_time, results(i).test_time);
        fprintf('\n');
    end

    % Report the best model by single-split accuracy
    [best_accuracy, best_idx] = max([results.accuracy]);
    fprintf('Best Model: %s with accuracy %.4f\n', results(best_idx).name, best_accuracy);
end