
%% basics of linear equations
% Solving the linear system M*x = y for x, using the matrix inverse.

% examples of linear equations

% invertible: full rank, so a unique solution x = inv(M1)*y exists
M1 = [1 0; -1 1]
y = [1 2]'
% solution
inv(M1)*y

% non-invertible: the columns are linearly dependent (rank 1),
% so inv() warns that the matrix is singular and returns Inf entries
M2 = [1 -1; -1 1]
inv(M2)*y  % doesn't work

% invertible but possibly unstable: M3 is close to the singular M2,
% so small changes in y produce large changes in the solution
M3 = [1 -1; -1 1.1]
inv(M3)*y

%% rank
% rank() counts the linearly independent columns (internally: the number
% of singular values above a tolerance)
rank(M1)
rank(M2)
rank(M3)

% What is the result of rank( zeros(1000) )?
% Does the rank change when multiplying the matrix with a scalar?

%% SVD for square matrix
% svd decomposes M4 = U*S*V'; the diagonal of S holds the singular values
M4 = [-0.5 0.5; 1 1];
[U,S] = svd( M4 )
% condition number: ratio of largest to smallest singular value
% (large values mean the matrix is close to singular)
S(1,1)/S(2,2)

% change the similarity of columns and see what happens
% (more similar columns -> closer to rank deficiency -> larger condition number)
M4 = [-0.1 0.1; 1 1];
[U,S] = svd( M4 )
% condition number
S(1,1)/S(2,2)

% orthogonal columns give the best possible condition number of 1
M4 = [-1 1; 1 1];
[U,S] = svd( M4 )
% condition number
S(1,1)/S(2,2)

% the vectors in U are orthonormal (orthogonal and unit length),
% so U'*U is the identity matrix
U'*U

%% SVD for rectangular matrix
% For a 3x2 matrix: U is 3x3, S is 3x2 (diagonal block), V is 2x2
M = [ [2 2 0]', [0 0 1]' ]
[U,S,V] = svd(M)

% check whether decomposition works out (should reproduce M)
U*S*V'

%% Why "eigen"vectors?
% The columns of U are eigenvectors of the "covariance" matrix M*M':
% when you stick eigenvectors into a covariance matrix...
(M*M')*U

% ...you get the vectors back, each column multiplied by the square of its
% singular value. S*S' is the 3x3 diagonal matrix of squared singular
% values, so U*(S*S') matches (M*M')*U exactly.
% (The former S.^2 was only 3x2 and could not be compared directly.)
U*(S*S')

% same for V: its columns are eigenvectors of M'*M.
% Note the scaling must multiply V from the RIGHT (column-wise scaling);
% the former S.^2*V scaled rows instead and gave the wrong result.
(M'*M)*V
V*(S'*S)

%% Why is this useful - SVD as a filter
% make some data: two perfectly structured signal channels
x = [-2*pi:0.01:2*pi];
data = [sin(x)' cos(x)'];

plot(data)

% replicate this matrix: 20 columns, but still only rank 2
data = repmat(data, 1, 10);
whos

% add noise (full-rank perturbation on top of the rank-2 signal)
data = data + randn(size(data));
plot(data)

% 'economy size' svd: only the first min(m,n) columns of U are computed,
% which is much cheaper for tall matrices like this one
[U,S,V] = svd( data, 0 );

% plot eigenvalues (singular values); expect two large ones (the signal)
% followed by a "noise floor"
sdiag = diag(S)
figure
plot( sdiag, 'x' )
% if you want to plot variances (percent of total variance per component)
svar = 100 * sdiag.^2 / sum( sdiag.^2 )
plot(svar, 'x')

% leave only first two SVD components (zero out the noise components)
sdiag2 = sdiag;
sdiag2(3:end) = 0
S2 = diag(sdiag2);

% reassemble filtered data: a rank-2 approximation of the noisy matrix
data2 = U*S2*V';
figure
plot(data2)

%% Why is this useful - SVD to compute pseudoinverse
% Since M4 = U*S*V' with orthogonal U,V, the inverse is V*inv(S)*U',
% and inv(S) just inverts the diagonal (singular) values.

% get M4 from above
M4 = [-0.1 0.1; 1 1];
[U,S,V] = svd( M4 )

% invert the singular values on the diagonal
S(1,1) = 1/S(1,1)
S(2,2) = 1/S(2,2)

% compute the inverse
myinv = V*S*U'

% check whether it's correct (should match myinv)
inv(M4)

% "dampen" the effect of small singular values: a small singular value
% becomes a huge entry after inversion, amplifying noise; replacing it
% with a smaller inverse value regularizes the solution
S(2,2) = 5

% see how this affects the inverse
myinv2 = V*S*U'

% apply to some "data" vector y and note the difference
y = [1 1]'

myinv*y
myinv2*y

% the data that these solutions would produce
M4*myinv*y      % exact
M4*myinv2*y     % approximate


%% Relationship PCA and SVD

% PCA removes means from columns,
% which differs from svd()
% Therefore, let's start with a column-centred matrix

X = [1 1 -2; 1 -2 1; -2 1 1]
mean(X)
% what's the rank of this matrix? (each row sums to zero -> rank deficient)

% PCA
% NOTE: princomp was removed from MATLAB (R2018a); pca is the replacement
% and returns the same [coefficients, scores, latent] outputs.
% pca centres the columns by default, but X is already centred.
[A,B,C] = pca(X)

% SVD
[U,S,V] = svd(X)

% note: A and V are the same (up to sign), because for pca rows are
% observations and columns are variables

% C and the diagonal of S contain the same information, but C is half of
% S squared. Reason: the squared singular values are the "raw" variance,
% and pca divides by the degrees of freedom (here: 2)





