% Demonstration of a very simple single-layer perceptron learning for 2D data.
% Animates the decision line while the perceptron learning rule runs over one
% of four example datasets. Produces: X (augmented data), d (labels in {-1,+1}),
% w (learned weights [w1 w2 bias]), n_iter (number of weight updates).
%
% Coded by Md. Iftekhar Tanveer (go2chayan@gmail.com)
% http://www.itanveer.com
% Modified by Silvio Simani 26/10/2021
clear, close all, clc           % 'clear' (not 'clear all') — keeps breakpoints/caches

example = 1;   % Normal dataset
%example = 2;  % Normal dataset with some other y
%example = 3;  % Normal dataset where some -1's are gone
%example = 4;  % Normal dataset with added mass

eta = 0.7;        % Learning rate
n_iter = 0;       % Counts individual weight updates
max_epochs = 1e4; % Safety cap: perceptron never converges on non-separable data

% Normal Dataset
if(example == 1),
    X = [0,0;0,1;0,2;0,3;0,4;1,0;1,1;1,2;1,3;1,4;2,0;2,1;2,2;2,3;2,4;3,0;3,1;3,2;3,3;3,4;
         4,0;4,1;4,2;4,3;4,4];
    d = [-1;-1;-1;-1;-1;+1;-1;-1;-1;-1;+1;+1;-1;-1;-1;+1;+1;+1;-1;-1;+1;+1;+1;+1;-1];
end;

% Some other y
if(example == 2),
    X = [0,0;0,1;0,2;0,3;0,4;1,0;1,1;1,2;1,3;1,4;2,0;2,1;2,2;2,3;2,4;3,0;3,1;3,2;3,3;3,4;
         4,0;4,1;4,2;4,3;4,4];
    d = [+1;+1;-1;-1;-1;+1;+1;-1;-1;-1;+1;+1;-1;-1;-1;+1;+1;-1;-1;-1;+1;+1;-1;-1;-1;];
end;

% Some -1's are gone
if(example == 3),
    X = [0,0;0,1;0,2;1,0;1,1;1,2;1,3;2,0;2,1;2,2;2,3;2,4;3,0;3,1;3,2;3,3;3,4;
         4,0;4,1;4,2;4,3;4,4];
    d = [-1;-1;-1;+1;-1;-1;-1;+1;+1;-1;-1;-1;+1;+1;+1;-1;-1;+1;+1;+1;+1;-1];
end;

% With added mass
if(example == 4),
    X = [0,0;0,1;0,2;0,3;0,4;1,0;1,1;1,2;1,3;1,4;2,0;2,1;2,2;2,3;2,4;3,0;3,1;3,2;3,3;3,4;
         4,0;4,1;4,2;4,3;4,4;0,-6;1,-6;2,-6;3,-6;4,-6;5,-6;6,-6];
    d = [-1;-1;-1;-1;-1;+1;-1;-1;-1;-1;+1;+1;-1;-1;-1;+1;+1;+1;-1;-1;+1;+1;+1;+1;-1;
         +1;+1;+1;+1;+1;+1;+1];
end;

% Adding column of 1's in X for y-intercept of the line
X = [X, ones(length(d),1)];

% Least Square (alternative closed-form initialization, kept for reference)
%w = X\d;
w = [0,0,0];

clf
epoch = 0;
while (sum(sign(w*X') ~= d') > 0) && (epoch < max_epochs)
    epoch = epoch + 1;
    for i = 1:length(d)
        scatter(X(:,1), X(:,2), [], d, 'filled')
        hold on

        % Perceptron learning rule: update only on misclassified samples
        if(sign(w*X(i,:)') ~= d(i)),
            %w = w + X(i,:)*d(i);
            w = w - eta*(sign(w*X(i,:)') - d(i))*X(i,:);  % Learning rule
            n_iter = n_iter + 1;
        end

        strEq = sprintf('%f*x + %f*y + %f = 0', w(1), w(2), w(3));
        xx = -2:0.01:6;
        % Guard: with w(2)==0 (e.g. the initial w=[0,0,0]) the line equation
        % degenerates (division by zero -> NaN/Inf), so skip drawing it
        if w(2) ~= 0
            yy = -(w(1)/w(2))*xx - (w(3)/w(2));
            scatter(xx, yy, '.');
        end
        hold off
        title(strEq);
        drawnow
        pause(0.3)
    end
end
n_iter