[X_tr, y0_tr] = checkboard(1000);
scatter(X_tr(1,:), X_tr(2,:), 30, y0_tr+2, 'filled')
[X_test, y0_test] = checkboard(5000);
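% checkboard is not shown in this listing. A minimal sketch consistent with
% how it is used here (2 x P points in [0,1]^2, labels +/-1 in a
% checkerboard pattern); the 4x4 grid size is an assumption:
%
%   function [X, y0] = checkboard(P)
%       X = rand(2, P);                          % P points uniform in [0,1]^2
%       c = floor(4*X);                          % assumed 4x4 grid of cells
%       y0 = (2*mod(c(1,:) + c(2,:), 2) - 1)';   % alternate +1/-1 per cell
%   end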
%
Learning:
close all;
dif_thresh = 0.001;
%for dif_thresh = logspace(-3,1,5)
results = zeros(4,6);
i = 1;
%
% for i = 1:4   % experiment loop disabled; its matching end below is also commented out
y_err = 0;
y_gen_err = 0;
%dif_thresh = ;
kernel = 'gaussian';
sigma = 0.1;
% sl = length(sigma_all);
sl = 1;   % single kernel width per run (the sigma sweep above is disabled)
%
%
%
%
y_tr_ = zeros(length(y0_tr), sl);
y_test = zeros(length(y0_test), sl);
alpha = zeros(length(y0_tr), sl);
epochs = 0;
% for j = 1:2
%
% for i = 1:10   % disabled: reused the outer loop variable i and had no matching end
%sigma = sigma_all(j);
K = calc_kernel(X_tr, X_tr, kernel, sigma);
[alpha, converged, epochs] = adatron_kernel(X_tr, y0_tr, dif_thresh, K);
epochs
support_num = sum(alpha>10^-5)
% w = (alpha.*y0_tr )'*X_tr';
%w_err = sum((w' - w0).^2);
converged
y_tr = sign(sum((repmat((alpha.*y0_tr),[1 size(X_tr,2)]).*K)))';
y_tr_err = sum(abs(y_tr-y0_tr)/2)
K2 = calc_kernel(X_test,X_tr,kernel,sigma);
y_test = sign(sum((repmat((alpha.*y0_tr),[1 size(X_test,2)]).*K2)))';
% y_test = y_test_curr;
y_gen_err = sum(abs(y0_test-y_test))/2
percent_err = y_gen_err/length(y_test)
%cos_gamma = w*w0/(norm(w0)*norm(w));
% results(i,:) = [length(X), support_num, y_err, y_gen_err];
%end
dif_thresh
% titles = {'P', 'support num','y_err', 'y_gen_err'}
% results
%end
figure
hold on;
% plot(X_tr(1,y0_tr==1),X_tr(2,y0_tr==1),'.k','markersize',10); hold all
% plot(X_tr(1,y0_tr==-1),X_tr(2,y0_tr==-1),'.g','markersize',10)
% plot(X_tr(1,Y~=y0_tr),X_tr(2,Y~=y0_tr),'xk', 'markersize',13, 'linewidth',1.5)
plot(X_test(1,y_test==1),X_test(2,y_test==1),'.k','markersize',10);
plot(X_test(1,y_test==-1),X_test(2,y_test==-1),'.g','markersize',10)
plot(X_test(1,y_test~=y0_test),X_test(2,y_test~=y0_test),'xk', 'markersize',7, 'linewidth',1.5)
plot(X_tr(1,alpha~=0),X_tr(2,alpha~=0),'or', 'markersize',7, 'linewidth',1.5)
title('Gaussian kernel','fontsize',12)
legend('test = 1', 'test = -1', 'wrong classification','Support Vectors')
[Figure: "Polynomial Kernel" — test scatter on [0,1] x [0,1]; legend: test = 1, test = -1, wrong classification, Support Vectors]
%
Learning:
% close all;
dif_thresh = 0.001;
%for dif_thresh = logspace(-3,1,5)
% i =1;
%
% for i = 1:4   % experiment loop disabled; its matching end below is also commented out
y_err = 0;
y_gen_err = 0;
%dif_thresh = ;
kernel = 'poly';
d = 10;
% sl = length(sigma_all);
sl = 1;   % single polynomial degree per run
%
%
%
%
y_tr_ = zeros(length(y0_tr), sl);
y_test = zeros(length(y0_test), sl);
alpha = zeros(length(y0_tr), sl);
epochs = 0;
% for j = 1:2
%
% for i = 1:10   % disabled: reused the outer loop variable i and had no matching end
%sigma = sigma_all(j);
K = calc_kernel(X_tr, X_tr, kernel, d);
[alpha, converged, epochs] = adatron_kernel(X_tr, y0_tr, dif_thresh, K);
epochs
support_num = sum(alpha>10^-5)   % same numerical threshold as the Gaussian run
% w = (alpha.*y0_tr )'*X_tr';
%w_err = sum((w' - w0).^2);
converged
y_tr = sign(sum((repmat((alpha.*y0_tr),[1 size(X_tr,2)]).*K)))';
y_tr_err = sum(abs(y_tr-y0_tr)/2)
K2 = calc_kernel(X_test,X_tr,kernel,d);
y_test = sign(sum((repmat((alpha.*y0_tr),[1 size(X_test,2)]).*K2)))';
% y_test = y_test_curr;
y_gen_err = sum(abs(y0_test-y_test))/2
percent_err = y_gen_err/length(y_test)
%cos_gamma = w*w0/(norm(w0)*norm(w));
% results(i,:) = [length(X), support_num, y_err, y_gen_err];
%end
dif_thresh
% titles = {'P', 'support num','y_err', 'y_gen_err'}
% results
%end
figure
hold on;
% plot(X_tr(1,y0_tr==1),X_tr(2,y0_tr==1),'.k','markersize',10); hold all
% plot(X_tr(1,y0_tr==-1),X_tr(2,y0_tr==-1),'.g','markersize',10)
% plot(X_tr(1,Y~=y0_tr),X_tr(2,Y~=y0_tr),'xk', 'markersize',13, 'linewidth',1.5)
plot(X_test(1,y_test==1),X_test(2,y_test==1),'.k','markersize',10);
plot(X_test(1,y_test==-1),X_test(2,y_test==-1),'.g','markersize',10)
plot(X_test(1,y_test~=y0_test),X_test(2,y_test~=y0_test),'xk', 'markersize',7, 'linewidth',1.5)
plot(X_tr(1,alpha~=0),X_tr(2,alpha~=0),'or', 'markersize',7, 'linewidth',1.5)
title('Polynomial kernel','fontsize',12)
legend('test = 1', 'test = -1', 'wrong classification','Support Vectors')
[Figure: "Gaussian Kernel" — test scatter on [0,1] x [0,1]; legend: test = 1, test = -1, wrong classification, Support Vectors]
function [alpha, converged, epochs] = adatron_kernel(X, y0, dif_thresh, K)
[N, P] = size(X);   % N features, P patterns
% if using just one set of inputs K is symmetric
eta = 1.5/max(diag(K));   % learning rate, kept below the 2/max(diag(K)) stability bound
Q = (y0*y0').*K;         % Q(mu,nu) = y_mu*y_nu*K(mu,nu)
alpha = zeros(P, 1);
alpha1 = zeros(P, 1);    % previous epoch's alpha, for the convergence test
delta = Q*alpha;         % delta(mu) = y_mu*f(x_mu)
tmax = 1000000;
% eta = 1; %This is the learning rate
% w = zeros(1,N);
%y = w*x;
dif = dif_thresh*10;
%alpha1 = alpha;
converged = 0;
epochs = 0;
%figure;
%scatter(x(1,:),x(2,:),30,y0+2,'filled')
%delta = ( y0(1) - y(1) ) / 2;
% figure;
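% Kernel Adatron sweep (one epoch per while-iteration). For each pattern mu:
%   alpha(mu) <- max(0, alpha(mu) + eta*(1 - delta(mu))),
% where delta = Q*alpha, i.e. delta(mu) = y_mu*sum_nu alpha(nu)*y_nu*K(mu,nu)
% is the margin of pattern mu. delta is updated incrementally after each
% alpha change instead of recomputing Q*alpha from scratch.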
while (dif>dif_thresh) && (epochs<tmax)
dif = 0;
% Leftover from the non-kernel perceptron version (y, w, x are undefined here):
% d = y0(i+1) - sign(y(i+1));   % "i+1" was used because of the mod function
% w = w + eta*d*x(:,i+1)';
for mu = 1:P
d_alpha = max(-alpha(mu), eta*(1-delta(mu)) );
alpha(mu) = alpha(mu) + d_alpha;
% alpha(mu) = alpha(mu) + max(-alpha(mu), eta*(1-delta(mu)) );
delta = delta + Q(:,mu)*d_alpha; % delta = Q*alpha;
dif = max(dif,abs(alpha(mu)-alpha1(mu)));
end
% alpha(alpha< 10^-4) = 0;
nonzero_alpha = sum(alpha>10^-4)   % printed each epoch to monitor sparsity
epochs = epochs+1;
%bar(1:1000,alpha);
%plot(1:length(alpha),alpha);
%pause(0.1)
alpha1 = alpha;
end
%y = w*x;
%sum(abs(y'-y0))
% The equation of the classifying line:
% w1*x1 + w2*x2 + w3 = 0
% x2 = -w3/w2 -w1/w2*x1
% Leftover 2-D visualization from the non-kernel version (j, w, x are undefined here):
% if j == 0
%     clf;
%     scatter(x(1,:), x(2,:), 30, y0+2, 'filled');
% end
% linex1 = [-1 1];
% linex2 = -w(1)/w(2).*linex1;
% hold on;
% plot(linex1, linex2)
% pause(0.1)
%dif = sum(abs(y0'-sign(y)));
if epochs < tmax
    converged = 1;
end
eta
function K = calc_kernel(X1,X2,kernel,par)
switch kernel
case 'linear'
K = X2'*X1;
case 'poly'
d = par;
K = (X2'*X1 + 1).^d;
case 'gaussian'
sigma = par;
X1_rep = repmat(X1, [1 1 size(X2,2)]);
X2_rep = repmat(reshape(X2, [size(X2,1) 1 size(X2,2)]), [1 size(X1,2) 1]);
X21 = (X2_rep - X1_rep).^2;
X21 = squeeze(sum(X21));        % squared distances, size(X1,2)-by-size(X2,2)
K = exp(X21'/(-2*sigma^2));     % oriented size(X2,2)-by-size(X1,2), matching the other cases
% Sanity check for one entry (K = X2'*X1 orientation: K(i,j) pairs X2(:,i) with X1(:,j)):
% K(30,75)
% exp(-norm(X2(:,30) - X1(:,75))^2/(2*sigma^2))
end
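% An equivalent, more memory-friendly way to build the Gaussian kernel,
% avoiding the 3-D repmat arrays; a sketch assuming the Statistics and
% Machine Learning Toolbox (pdist2) is available:
%
%   D = pdist2(X2', X1');           % pairwise Euclidean distances
%   K = exp(-D.^2/(2*sigma^2));     % same size(X2,2)-by-size(X1,2) orientation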