
%% basics of linear equations

% Examples of linear equations M*x = y, solved via the matrix inverse.

% invertible: full-rank 2x2 matrix
M1 = [1 0; -1 1]
y = [1 2]'
% solution x = inv(M1)*y
% (for production code prefer M1\y, which avoids forming the inverse)
inv(M1)*y

% non-invertible: rows are linearly dependent (rank 1)
M2 = [1 -1; -1 1]
inv(M2)*y  % doesn't work: M2 is singular, result contains Inf/NaN

% invertible but possibly unstable (ill-conditioned):
% rows are nearly linearly dependent
M3 = [1 -1; -1 1.1]
inv(M3)*y

% What does that mean for the effect of noise?
n = randn(2,10); % matrix with random noise, one column per noise sample
inv(M1)*n  % noise passes through at roughly its original scale
inv(M3)*n  % noise is strongly amplified by the near-singular matrix


%% rank
% rank() returns the number of linearly independent rows/columns
% (internally computed from the singular values).
rank(M1)  % 2: full rank
rank(M2)  % 1: rows are linearly dependent
rank(M3)  % 2: full rank, even though M3 is close to singular

% What is the result of rank( zeros(1000) )? rank( eye(5) )?
% Does the rank change when multiplying the matrix with a scalar?

%% Basic SVD

% get singular values for above matrices, compute "condition numbers":
% the ratio of largest to smallest singular value. Large values mean
% the matrix is ill-conditioned and amplifies noise (cf. cond()).

% FIX: this line was missing -- without it, the first ratio below used
% whatever stale `s` happened to be in the workspace (or errored).
s = svd(M1)
s(1)/s(2)

s = svd(M2)
s(1)/s(2)  % Inf: M2 is singular, its smallest singular value is 0

s = svd(M3)
s(1)/s(2)  % large but finite: M3 is ill-conditioned

%% SVD for square matrix
% For square M, svd returns orthonormal U and diagonal S of the same size.

M = eye(2)
[U,S] = svd(M)  % identity matrix: both singular values are 1

[U,S] = svd(M1)

[U,S] = svd(M2)  % singular matrix: second singular value is 0

[U,S] = svd(M3)  % near-singular: second singular value is small

% the vectors in U are orthonormal (orthogonal and unit length)
U'*U  % yields the identity matrix (up to rounding)

%% SVD for rectangular matrix
% For a 3x2 matrix, the full SVD gives U (3x3), S (3x2), V (2x2).
M = [ [2 2 0]', [0 0 1]' ]
[U,S,V] = svd(M)

% check whether decomposition works out
U*S*V'  % should reproduce M (up to rounding)


%% Why is this useful - SVD as a filter
% Idea: the signal concentrates in the few components with large
% singular values, while noise spreads across all components.
% Keeping only the large components filters the data.

% make some data: two columns, a sine and a cosine over x
x = [-2*pi:0.01:2*pi];
data = [sin(x)' cos(x)'];

plot(data)

% replicate this matrix: 10 copies side by side -> 20 columns,
% but the matrix still has rank 2
data = repmat(data, 1, 10);
whos

% add noise
data = data + randn(size(data));
plot(data)

% 'economy size' svd: computes only as many columns of U as needed
[U,S,V] = svd( data, 0 );

% plot eigenvalues (more precisely: the singular values)
sdiag = diag(S)
figure
plot( sdiag, 'x' )
% if you want to plot variances (percent of total variance per component)
svar = 100 * sdiag.^2 / sum( sdiag.^2 )
plot(svar, 'x')

% leave only first two SVD components (zero out all the others)
sdiag2 = sdiag;
sdiag2(3:end) = 0
S2 = diag(sdiag2);

% reassemble filtered data from the truncated decomposition
data2 = U*S2*V';
figure
plot(data2)

%% Relationship PCA and SVD
% NOTE(review): princomp was removed in current MATLAB releases;
% its replacement is pca(). Code kept as-is for the original setup.

% The Matlab command princomp removes means from columns,
% which differs from svd()
% Therefore, let's start with a column-centred matrix

X = [1 1 -2; 1 -2 1; -2 1 1]
mean(X)  % all zeros: columns are already centred
% what's the rank of this matrix?

% PCA: A = coefficients (loadings), B = scores, C = component variances
[A,B,C] = princomp(X)

% SVD
[U,S,V] = svd(X)

% note: a and v are the same, because for princomp rows are observations
% (possibly up to a sign flip per component)

% s and c should contain the same values, but c is half of s-squared
% reason: s-square is the "raw" variance, and princomp divides by the
% degrees of freedom (here: 2)

%% Why "eigen"vectors?
% U and V are eigenvectors of the "covariance" matrices M*M' and M'*M.
% FIX: recompute the SVD of M here -- the previous section overwrote
% U, S, V with the SVD of X, so (M'*M)*V would be a dimension error
% (2x2 times 3x3) and the comparisons would be meaningless.
[U,S,V] = svd(M)

% when you stick eigenvectors into a covariance matrix...
(M*M')*U

% ...you get the vectors back, each multiplied by the square of its
% singular value. For rectangular M the squared singular values are
% S*S' (left side) and S'*S (right side); the original S.^2 had the
% wrong shape for both comparisons.
U*(S*S')

% same for V
(M'*M)*V
V*(S'*S)

%% SVD to compute pseudoinverse
% inv(M) = V * inv(S) * U': transpose U and V, take reciprocals of the
% singular values. Small singular values blow up under inversion;
% damping them yields a regularized (pseudo)inverse, cf. pinv().

% define the example matrix
M4 = [-0.1 0.1; 1 1];
[U,S,V] = svd( M4 )

% invert the singular values (the diagonal of S)
S(1,1) = 1/S(1,1)
S(2,2) = 1/S(2,2)

% compute the inverse
myinv = V*S*U'

% check whether it's correct
inv(M4)  % should match myinv (up to rounding)

% "dampen" the effect of small singular values: cap the largest
% inverted singular value (~7.07 here) at 5
S(2,2) = 5

% see how this affects the inverse
myinv2 = V*S*U'

% apply to some "data" vector y and note the difference
y = [1 1]'

myinv*y
myinv2*y

% the data that these solutions would produce
M4*myinv*y      % exact
M4*myinv2*y     % approximate


