2 changes: 2 additions & 0 deletions demo/demo_regression_2d.m
@@ -37,3 +37,5 @@
z = reshape(yt,size(x1,1),size(x2,1));
figure;
surf(x1,x2,z);
hold on;
plot3(x(:,1), x(:,2), y, '+'); % overlay the training data
92 changes: 92 additions & 0 deletions demo/demo_regression_2d_exkrls.m
@@ -0,0 +1,92 @@
% Showcasing the extended kernel RLS (EX-KRLS) algorithm.
%
% A limitation of this implementation is that it stops updating once the
% number of presented data points exceeds M.
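%
% A quick way to observe the freeze (a sketch; it assumes nothing beyond
% the kaf.train/kaf.evaluate interface already used in this demo):
%
%   x_probe = [1 1];
%   before = kaf.evaluate(x_probe); % after more than M points were presented
%   kaf.train(rand(1,2), randn);    % present one more point
%   after = kaf.evaluate(x_probe);  % equals 'before': the filter no longer adapts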

close all
clear

%% PARAMETERS

algorithm = 'exkrls'; % algorithm class (choose from lib/ folder)
opts = struct(... % algorithm options go here (kernel type, parameters, etc.)
    'alphaf', .999, ... % state forgetting factor, "alpha" in the publication
    'beta', .995, ... % data forgetting factor
    'lambda', 1E-2, ... % regularization parameter
    'q', 1E-3, ... % trade-off between modeling variation and measurement disturbance
    'M', 100, ... % dictionary size (not necessarily the lookback)
    'kerneltype', 'linear'); % 'gauss' also possible; 'linear' can only fit a plane

%% PROGRAM

kaf = feval(algorithm, opts); %#ok<FVAL>

% generate some data
c = 5;
N = 200;
x = rand(N,2)*c;
y = zeros(N,1);
% y = sin(3*x(:,1)).*cos(x(:,1)+x(:,2)); % non-linear surface (alternative)
y(1:N/2) = 3*x(1:N/2,1)+0.5*x(1:N/2,2); % linear surface
y(N/2:N) = -1*x(N/2:N,1)-1.5*x(N/2:N,2); % the linear surface changes here
y = y + randn(N,1)*1.0; % add some noise

fprintf('First half of Training')
for i=1:(N/2-1)
if ~mod(i,floor(N/10)), fprintf('.'); end
kaf.train(x(i,:),y(i));
% y_test = kaf.evaluate(x(i+1,:));
end
fprintf('\n')

%% OUTPUT

[x1,x2] = meshgrid(0:.2:c, 0:.2:c);
yt = kaf.evaluate([x1(:) x2(:)]);

z = reshape(yt,size(x1,1),size(x2,1));
figure;
surf(x1,x2,z);
hold on;
plot3(x(:,1), x(:,2), y, '+'); % overlay the training data

fprintf('Training shortly after Non-Stationarity')
for i=N/2:N/2+9
if ~mod(i,floor(N/10)), fprintf('.'); end
kaf.train(x(i,:),y(i));
% y_test = kaf.evaluate(x(i+1,:));
end
fprintf('\n')

%% OUTPUT

[x1,x2] = meshgrid(0:.2:c, 0:.2:c);
yt = kaf.evaluate([x1(:) x2(:)]);

z = reshape(yt,size(x1,1),size(x2,1));
figure;
surf(x1,x2,z);
hold on;
plot3(x(:,1), x(:,2), y, '+');

fprintf('Training Rest')
for i=N/2+10:N
if ~mod(i,floor(N/10)), fprintf('.'); end
kaf.train(x(i,:),y(i));
% y_test = kaf.evaluate(x(i+1,:));
end
fprintf('\n')

%% OUTPUT

[x1,x2] = meshgrid(0:.2:c, 0:.2:c);
yt = kaf.evaluate([x1(:) x2(:)]);

z = reshape(yt,size(x1,1),size(x2,1));
figure;
surf(x1,x2,z);
hold on;
plot3(x(:,1), x(:,2), y, '+');
90 changes: 90 additions & 0 deletions demo/demo_regression_2d_plane.m
@@ -0,0 +1,90 @@
% Linear plane regression demo.
%
% This file is part of the Kernel Adaptive Filtering Toolbox for Matlab.
% https://github.com/steven2358/kafbox

close all
clear

%% PARAMETERS

algorithm = 'krlst'; % algorithm class (choose from lib/ folder)
opts = struct(... % algorithm options go here (kernel type, parameters, etc.)
    'lambda', 0.9999, ... % forgetting factor
    'M', 10, ... % dictionary size (not necessarily the lookback)
    'sn2', 1E-2, ... % noise-to-signal ratio (regularization parameter)
    'kerneltype', 'linear', ... % 'gauss' also possible; 'linear' can only fit a plane
    'jitter', 1E-6); % small jitter for numerical stability
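
% To fit the non-linear surface that is commented out below, a Gaussian
% kernel could be used instead (a sketch; 'kernelpar' as the kernel-width
% option name is an assumption based on the toolbox's conventions):
%
%   opts.kerneltype = 'gauss';
%   opts.kernelpar = 1; % kernel width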

%% PROGRAM

kaf = feval(algorithm, opts); %#ok<FVAL>

% generate some data
c = 5;
N = 200;
x = rand(N,2)*c;
y = zeros(N,1);
% y = sin(3*x(:,1)).*cos(x(:,1)+x(:,2)); % non-linear surface (alternative)
y(1:N/2) = 3*x(1:N/2,1)+0.5*x(1:N/2,2); % linear surface
y(N/2:N) = -1*x(N/2:N,1)-1.5*x(N/2:N,2); % the linear surface changes here
y = y + randn(N,1)*1.0; % add some noise

fprintf('First half of Training')
for i=1:(N/2-1)
if ~mod(i,floor(N/10)), fprintf('.'); end
kaf.train(x(i,:),y(i));
% y_test = kaf.evaluate(x(i+1,:));
end
fprintf('\n')

%% OUTPUT

[x1,x2] = meshgrid(0:.2:c, 0:.2:c);
yt = kaf.evaluate([x1(:) x2(:)]);

z = reshape(yt,size(x1,1),size(x2,1));
figure;
surf(x1,x2,z);
hold on;
plot3(x(:,1), x(:,2), y, '+'); % overlay the training data

fprintf('Training after Non-Stationarity')
for i=N/2:N/2+2
if ~mod(i,floor(N/10)), fprintf('.'); end
kaf.train(x(i,:),y(i));
% y_test = kaf.evaluate(x(i+1,:));
end
fprintf('\n')

%% OUTPUT

[x1,x2] = meshgrid(0:.2:c, 0:.2:c);
yt = kaf.evaluate([x1(:) x2(:)]);

z = reshape(yt,size(x1,1),size(x2,1));
figure;
surf(x1,x2,z);
hold on;
plot3(x(:,1), x(:,2), y, '+');

fprintf('Training Rest')
for i=N/2+3:N
if ~mod(i,floor(N/10)), fprintf('.'); end
kaf.train(x(i,:),y(i));
% y_test = kaf.evaluate(x(i+1,:));
end
fprintf('\n')

%% OUTPUT

[x1,x2] = meshgrid(0:.2:c, 0:.2:c);
yt = kaf.evaluate([x1(:) x2(:)]);

z = reshape(yt,size(x1,1),size(x2,1));
figure;
surf(x1,x2,z);
hold on;
plot3(x(:,1), x(:,2), y, '+');
87 changes: 87 additions & 0 deletions demo/demo_regression_2d_swkrls.m
@@ -0,0 +1,87 @@
% Showcasing sliding-window kernel RLS (SW-KRLS).
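%
% SW-KRLS keeps only the last M samples in its dictionary, so the old
% regime is forgotten roughly M steps after a change point. A rough
% estimate of how much stale data remains mid-adaptation (a sketch, using
% M = 100 and the 50 post-change samples presented below):
%
%   frac_stale = max(0, 1 - 50/100); % = 0.5: half the window is pre-change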

close all
clear

%% PARAMETERS

algorithm = 'swkrls'; % algorithm class (choose from lib/ folder)
opts = struct(... % algorithm options go here (kernel type, parameters, etc.)
    'lambda', 0.99999, ... % forgetting factor
    'c', 1E-2, ... % regularization parameter; e.g. with 1E3 we see significant shrinkage
    'M', 100, ... % dictionary size (not necessarily the lookback)
    'kerneltype', 'linear'); % 'gauss' also possible; 'linear' can only fit a plane

%% PROGRAM

kaf = feval(algorithm, opts); %#ok<FVAL>

% generate some data
c = 5;
N = 200;
x = rand(N,2)*c;
y = zeros(N,1);
% y = sin(3*x(:,1)).*cos(x(:,1)+x(:,2)); % non-linear surface (alternative)
y(1:N/2) = 3*x(1:N/2,1)+0.5*x(1:N/2,2); % linear surface
y(N/2:N) = -1*x(N/2:N,1)-1.5*x(N/2:N,2); % the linear surface changes here
y = y + randn(N,1)*1.0; % add some noise

fprintf('First half of Training')
for i=1:(N/2-1)
if ~mod(i,floor(N/10)), fprintf('.'); end
kaf.train(x(i,:),y(i));
% y_test = kaf.evaluate(x(i+1,:));
end
fprintf('\n')

%% OUTPUT

[x1,x2] = meshgrid(0:.2:c, 0:.2:c);
yt = kaf.evaluate([x1(:) x2(:)]);

z = reshape(yt,size(x1,1),size(x2,1));
figure;
surf(x1,x2,z);
hold on;
plot3(x(:,1), x(:,2), y, '+'); % overlay the training data

fprintf('Training shortly after Non-Stationarity')
for i=N/2:N/2+49
if ~mod(i,floor(N/10)), fprintf('.'); end
kaf.train(x(i,:),y(i));
% y_test = kaf.evaluate(x(i+1,:));
end
fprintf('\n')

%% OUTPUT

[x1,x2] = meshgrid(0:.2:c, 0:.2:c);
yt = kaf.evaluate([x1(:) x2(:)]);

z = reshape(yt,size(x1,1),size(x2,1));
figure;
surf(x1,x2,z);
hold on;
plot3(x(:,1), x(:,2), y, '+');

fprintf('Training Rest')
for i=N/2+50:N
if ~mod(i,floor(N/10)), fprintf('.'); end
kaf.train(x(i,:),y(i));
% y_test = kaf.evaluate(x(i+1,:));
end
fprintf('\n')

%% OUTPUT

[x1,x2] = meshgrid(0:.2:c, 0:.2:c);
yt = kaf.evaluate([x1(:) x2(:)]);

z = reshape(yt,size(x1,1),size(x2,1));
figure;
surf(x1,x2,z);
hold on;
plot3(x(:,1), x(:,2), y, '+');
7 changes: 4 additions & 3 deletions lib/base/base_estimator.m
@@ -3,21 +3,22 @@
% This file is part of the Kernel Adaptive Filtering Toolbox for Matlab.
% https://github.com/steven2358/kafbox/

classdef base_estimator < matlab.mixin.Copyable
% classdef base_estimator < matlab.mixin.Copyable % previous base class
classdef base_estimator < handle
methods
% get parameter names for the estimator
function names = get_param_names(obj)
names = fieldnames(obj);
end

% get parameters
function params = get_params(obj)
params = struct;
for fn = fieldnames(obj)'
params.(fn{1}) = obj.(fn{1});
end
end

% set parameters
function set_params(obj,params)
if (nargin > 0) % copy valid parameters
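
A note on the base-class change above: matlab.mixin.Copyable provides a
copy() method for duplicating an estimator, while a plain handle subclass
only has reference semantics, so plain assignment aliases the same object.
A minimal sketch of the difference, assuming a trained kafbox filter kaf
and hypothetical data x1, y1:

    kaf2 = kaf;         % handle semantics: kaf2 and kaf are the same object
    kaf2.train(x1, y1); % ...so this also updates kaf
    % with the previous base class, kaf2 = copy(kaf) would have created an
    % independent filter whose training leaves kaf untouched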