Attachment 'GLM1_Nov2018.m' of SignalAnalysisMatlabSchedule - Methods

%% Introduction to signal analysis in Matlab: GLM I
% CBU workshop, Ed Sohoglu Nov 2018

%% Regression
clearvars
rng(1) % for reproducibility - the code below will generate 'random' numbers in a predictable way

nReg = 1; % number of regressors
nSubj = 10; % number of subjects

% make design matrix
age = randi([20 60],nSubj,1); % make age regressor to lie within 20 and 60 years of age (for each subject)
X = age; % X = design matrix
X = [X ones(size(X,1),1)]; % add constant

% make data using GLM equation Y = B1*X1 + B2*X2 + B3*X3 etc. + Constant + Error
% here beta 1 represents the influence of the age covariate and beta 2 is the constant
Y = 3*X(:,1) + 500 + 20*randn(nSubj,1);

% plot data and design matrix
figure;
subplot(1,2,1); scatter(age,Y);
subplot(1,2,2); imagesc(X); colormap(gray); colorbar;

% compute betas
betas = pinv(X)*Y;
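% Aside (added note, not part of the original workshop script): for a
% full-rank design matrix, pinv(X)*Y is the ordinary least-squares solution,
% so the normal equations (or MATLAB's backslash operator) give the same
% betas up to numerical precision. The variable name betas_check is just for
% illustration.
betas_check = (X'*X)\(X'*Y); % normal equations
disp(max(abs(betas - betas_check))) % should be close to zero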

% error sum of squares
prediction = X*betas;
res = Y-prediction;
SS_error = sum((res-mean(res)).^2);
subplot(1,2,1); hold on; plot(age,prediction); hold off % plot data and prediction

% effect sum of squares
prediction = X(:,1:nReg)*betas(1:nReg); % only use the regressor(s) of interest (ignore other covariates and constant terms)
SS_effect = sum((prediction-mean(prediction)).^2);

% compute mean squares
df_effect = nReg;
df_error = size(Y,1)-df_effect-1;
MS_effect = SS_effect./df_effect;
MS_error = SS_error/df_error;

% compute F statistic, p-value and R2
F = MS_effect./MS_error;
p = fcdf(F,df_effect,df_error,'upper'); % requires Statistics Toolbox

SS_total = sum((Y-mean(Y)).^2);
R2 = SS_effect/SS_total; % R2 is the proportion of variance explained (take square root to compute correlation coefficient)

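% Optional cross-check (added note, not part of the original workshop script):
% fitlm from the Statistics Toolbox (already needed for fcdf above) fits the
% same model, adding the intercept automatically, and should reproduce the
% betas, F, p and R2 computed by hand. The variable name mdl is just for
% illustration.
mdl = fitlm(age,Y);
disp(mdl.Coefficients)      % slope and intercept should match betas above
disp(mdl.Rsquared.Ordinary) % should match R2 above
disp(anova(mdl,'summary'))  % model F and p should match F and p above
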
%% One-way ANOVA
clearvars
rng(1) % for reproducibility - the code below will generate 'random' numbers in a predictable way

nReg = 3; % number of regressors (groups)
nSubj = 10; % number of subjects per group

% make design matrix
X = kron(eye(nReg),ones(nSubj,1)); % X = design matrix
%X = [X ones(size(X,1),1)]; % could add constant. If so, betas would represent
% differences from a global mean. But importantly, this wouldn't change
% group DIFFERENCES (what we are interested in)

% make data using GLM equation Y = B1*X1 + B2*X2 + B3*X3 etc. + Error
% here betas 1 to 3 represent means for the three groups
Y = 500*X(:,1) + 700*X(:,2) + 600*X(:,3) + 20*randn(nReg*nSubj,1);

% plot data and design matrix
figure;
subplot(1,2,1); scatter(1:nSubj*nReg,Y);
subplot(1,2,2); imagesc(X); colormap(gray); colorbar;

% compute betas
betas = pinv(X)*Y;

% error sum of squares
prediction = X*betas;
res = Y-prediction;
SS_error = sum((res-mean(res)).^2);
subplot(1,2,1); hold on; plot(1:nSubj*nReg,prediction); hold off % plot data and prediction

% effect sum of squares
prediction = X(:,1:nReg)*betas(1:nReg); % ignore constant terms
SS_effect = sum((prediction-mean(prediction)).^2);

% compute mean squares
df_effect = nReg-1;
df_error = size(Y,1)-df_effect-1;
MS_effect = SS_effect./df_effect;
MS_error = SS_error/df_error;

% compute F statistic, p-value and R2
F = MS_effect./MS_error;
p = fcdf(F,df_effect,df_error,'upper'); % requires Statistics Toolbox

SS_total = sum((Y-mean(Y)).^2);
R2 = SS_effect/SS_total; % R2 is the proportion of variance explained (take square root to compute correlation coefficient)

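% Optional cross-check (added note, not part of the original workshop script):
% anova1 from the Statistics Toolbox should reproduce the F and p for the
% group effect. The variables group and tbl_anova1 are named here only for
% illustration.
group = kron((1:nReg)',ones(nSubj,1)); % group labels 1-3, matching the design matrix
[p_anova1,tbl_anova1] = anova1(Y,group,'off'); % 'off' suppresses the figure
disp(tbl_anova1) % the groups row should match F and p computed above
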
%% One-way repeated measures ANOVA
clearvars
rng(1) % for reproducibility - the code below will generate 'random' numbers in a predictable way

nReg = 3; % number of regressors (conditions)
nSubj = 10; % number of subjects per condition

% make design matrix
X = kron(eye(nReg),ones(nSubj,1)); % X = design matrix
X = [X kron(ones(nReg,1),eye(nSubj))]; % add constant for each subject

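% Note (added, not part of the original workshop script): this design matrix
% is rank deficient - the condition columns and the subject columns each sum
% to a column of ones - so the betas are not uniquely defined. pinv (used
% below) returns the minimum-norm least-squares solution; importantly,
% condition DIFFERENCES are unaffected by this choice.
disp(rank(X)) % 12, i.e. one less than the number of columns (nReg+nSubj = 13)
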
% make data using GLM equation Y = B1*X1 + B2*X2 + B3*X3 etc. + Error
% here betas 1 to 3 represent means for the three conditions, betas 4 to 13
% are the subject-specific constants (which we want to "regress out")
subject_means = 200*randn(nSubj,1)+500;
subject_means = kron(ones(nReg,1),subject_means);
Y = 300*X(:,1) + 200*X(:,2) + 400*X(:,3) + subject_means + 20*randn(nReg*nSubj,1);

% plot data and design matrix
Y2plot = reshape(Y,nSubj,nReg);
figure; subplot(1,2,1); hold on
for i=1:nReg
    scatter(1:nSubj,Y2plot(:,i));
end
hold off
subplot(1,2,2); imagesc(X); colormap(gray); colorbar;

% compute betas
betas = pinv(X)*Y;

% error sum of squares
prediction = X*betas;
res = Y-prediction;
SS_error = sum((res-mean(res)).^2);
prediction2plot = reshape(prediction,nSubj,nReg);
subplot(1,2,1); hold on; ax = gca; ax.ColorOrderIndex = 1; plot(1:nSubj,prediction2plot); hold off % plot data and prediction

% effect sum of squares
prediction = X(:,1:nReg)*betas(1:nReg); % ignore constant terms
SS_effect = sum((prediction-mean(prediction)).^2);

% compute mean squares
df_effect = nReg-1;
df_error = size(Y,1)-df_effect-nSubj;
MS_effect = SS_effect./df_effect;
MS_error = SS_error/df_error;

% compute F statistic, p-value and R2
F = MS_effect./MS_error;
p = fcdf(F,df_effect,df_error,'upper'); % requires Statistics Toolbox

SS_total = sum((Y-mean(Y)).^2);
R2 = SS_effect/SS_total; % R2 is the proportion of variance explained (take square root to compute correlation coefficient)

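% Optional cross-check (added note, not part of the original workshop script):
% a one-way repeated measures ANOVA is equivalent to a two-way ANOVA without
% replication (conditions x subjects), so anova2 on the subjects-by-conditions
% matrix should reproduce the condition F and p computed above. The variable
% names p_rm and tbl_rm are just for illustration.
[p_rm,tbl_rm] = anova2(Y2plot,1,'off'); % columns = conditions, rows = subjects
disp(tbl_rm) % the 'Columns' row should match F and p computed above
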
%% ANCOVA
clearvars
rng(1) % for reproducibility - the code below will generate 'random' numbers in a predictable way

% In this exercise, you will combine what you learned from the Regression
% and One-way ANOVA examples to build an ANCOVA model.
% You will need to generate the RT data, arranged as three groups of subjects
% (N = 10 per group), similar to the One-way ANOVA example above.
% You will also need to generate the age covariate (similar to the Regression example above).
% Use ANCOVA to test for the group effect, after controlling for the influence of age.

% NB: as the regressors are correlated, the models need to be estimated
% differently to obtain the correct F statistics. For simplicity I don't do
% this here, but the alternative version below does.

nReg = 3; % number of (group) regressors
nSubj = 10; % per group

% make design matrix
X = kron(eye(nReg),ones(nSubj,1)); % X = design matrix
age = randi([20 60],nSubj*3,1);
X = [X age]; % add age covariate
X = [X ones(size(X,1),1)]; % add constant

% make data using GLM equation Y = B1*X1 + B2*X2 + B3*X3 etc. + Error
% here betas 1 to 3 represent means for the three groups and beta 4 is the
% slope of the age covariate
Y = 500*X(:,1) + 700*X(:,2) + 600*X(:,3) + 3*X(:,4) + 20*randn(nReg*nSubj,1);

% plot data and design matrix
figure;
subplot(1,2,1); scatter(1:nSubj*nReg,Y);
subplot(1,2,2); imagesc(X); colormap(gray); colorbar;

% compute betas
betas = pinv(X)*Y;

% error sum of squares
prediction = X*betas;
res = Y-prediction;
SS_error = sum((res-mean(res)).^2);
subplot(1,2,1); hold on; plot(1:nSubj*nReg,prediction); hold off % plot data and prediction

% effect sum of squares
prediction = X(:,1:nReg)*betas(1:nReg); % ignore constant terms
SS_effect = sum((prediction-mean(prediction)).^2);

% compute mean squares
df_effect = nReg-1;
df_error = size(Y,1)-df_effect-2;
MS_effect = SS_effect./df_effect;
MS_error = SS_error/df_error;

% compute F statistic, p-value and R2
F = MS_effect./MS_error;
p = fcdf(F,df_effect,df_error,'upper'); % requires Statistics Toolbox

%% ANCOVA (ALT VERSION, correct F statistic)
clearvars
rng(1) % for reproducibility - the code below will generate 'random' numbers in a predictable way

% In this exercise, you will combine what you learned from the Regression
% and One-way ANOVA examples to build an ANCOVA model.
% You will need to generate the RT data, arranged as three groups of subjects
% (N = 10 per group), similar to the One-way ANOVA example above.
% You will also need to generate the age covariate (similar to the Regression example above).
% Use ANCOVA to test for the group effect, after controlling for the influence of age.

nReg = 3; % number of (group) regressors
nSubj = 10; % per group

% make design matrix
X = kron(eye(nReg),ones(nSubj,1)); % X = design matrix
age = randi([20 60],nSubj*3,1);
X = [X age]; % add age covariate
X = [X ones(size(X,1),1)]; % add constant (needed for this version!)

% make data using GLM equation Y = B1*X1 + B2*X2 + B3*X3 etc. + Error
% here betas 1 to 3 represent means for the three groups and beta 4 is the
% slope of the age covariate
Y = 500*X(:,1) + 700*X(:,2) + 600*X(:,3) + 3*X(:,4) + 20*randn(nReg*nSubj,1);

% plot data and design matrix
figure;
subplot(1,2,1); scatter(1:nSubj*nReg,Y);
subplot(1,2,2); imagesc(X); colormap(gray); colorbar;

% compute betas
betas = pinv(X)*Y;

% error sum of squares
prediction = X*betas;
res = Y-prediction;
SS_error = sum((res-mean(res)).^2);
subplot(1,2,1); hold on; plot(1:nSubj*nReg,prediction); hold off % plot data and prediction

% effect sum of squares (compute by working out the increase in error that
% occurs when omitting the effect of interest from the model)
X_reduced = X(:,[4 5]); % for assessing the effect of group
%X_reduced = X(:,[1:3 5]); % for assessing the effect of the covariate (but remember to change df_effect to 1)
betas_reduced = pinv(X_reduced)*Y;
prediction = X_reduced*betas_reduced;
res = Y-prediction;
SS_effect = sum((res-mean(res)).^2)-SS_error;

% compute mean squares
df_effect = nReg-1;
df_error = size(Y,1)-df_effect-2;
MS_effect = SS_effect./df_effect;
MS_error = SS_error/df_error;

% compute F statistic, p-value and R2
F = MS_effect./MS_error;
p = fcdf(F,df_effect,df_error,'upper'); % requires Statistics Toolbox

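% Optional cross-check (added note, not part of the original workshop script):
% anovan with age as a continuous covariate should reproduce the F and p for
% the group effect (anovan uses Type III sums of squares by default, i.e.
% each effect is assessed after adjusting for the others, as in this version).
% The variables group and tbl_ancova are named here only for illustration.
group = kron((1:nReg)',ones(nSubj,1)); % group labels 1-3
[p_ancova,tbl_ancova] = anovan(Y,{group,age},'continuous',2,...
    'varnames',{'group','age'},'display','off');
disp(tbl_ancova) % the 'group' row should match F and p computed above
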
%% Two-way (2x2) ANOVA
clearvars
rng(1) % for reproducibility - the code below will generate 'random' numbers in a predictable way

% In this exercise, you can try a more complex two-way (2x2) ANOVA.
% Try to assess each of the three effects in this design
% (main effect A, main effect B and the AxB interaction).
% Tip: each factor (column) in the design matrix will need to code for
% differences between the levels of that factor (e.g. for factor A: kron([1 1 -1 -1]',ones(nSubj,1))).
% The column for the interaction can be formed by element-wise
% multiplication of factors A and B.

nReg = 4; % number of groups (cells in the 2x2 design)
nSubj = 10; % number of subjects per group
chooseFactor = 3; % which factor do you want to assess? (1 for A, 2 for B, 3 for interaction)

% make design matrix
A = kron([1 1 -1 -1]',ones(nSubj,1)); % Factor A
B = kron([1 -1 1 -1]',ones(nSubj,1)); % Factor B
AB = A.*B; % AxB interaction
X = [A B AB];
%X = [X ones(size(X,1),1)]; % add constant

% make data using GLM equation Y = B1*X1 + B2*X2 + B3*X3 etc. + Error
% here betas 1 to 3 represent the sizes of main effect A, main effect B and
% the AxB interaction (the four cell means of the 2x2 design are combinations of these effects)
Y = 500*X(:,1) + 700*X(:,2) + 600*X(:,3) + 20*randn(nReg*nSubj,1);

% plot data and design matrix
Y2plot = reshape(Y,nSubj,nReg);
figure; subplot(1,2,1); hold on
for i=1:nReg
    scatter(1:nSubj,Y2plot(:,i));
end
hold off
subplot(1,2,2); imagesc(X); colormap(gray); colorbar;

% compute betas
betas = pinv(X)*Y;

% error sum of squares
prediction = X*betas;
res = Y-prediction;
SS_error = sum((res-mean(res)).^2);
prediction2plot = reshape(prediction,nSubj,nReg);
subplot(1,2,1); hold on; ax = gca; ax.ColorOrderIndex = 1; plot(1:nSubj,prediction2plot); hold off % plot data and prediction

% effect sum of squares
prediction = X(:,chooseFactor)*betas(chooseFactor); % only include the factor you want to assess
SS_effect = sum((prediction-mean(prediction)).^2);

% compute mean squares
df_effect = 1; % degrees of freedom for the factor under consideration
df_error = size(Y,1)-(nReg-1)-1; % but for error degrees of freedom, use the full model
MS_effect = SS_effect./df_effect;
MS_error = SS_error/df_error;

% compute F statistic, p-value and R2
F = MS_effect./MS_error;
p = fcdf(F,df_effect,df_error,'upper'); % requires Statistics Toolbox

SS_total = sum((Y-mean(Y)).^2);
R2 = SS_effect/SS_total; % R2 is the proportion of variance explained (take square root to compute correlation coefficient)
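% Optional cross-check (added note, not part of the original workshop script):
% anovan with the two factors as grouping variables and an interaction term
% should give matching F and p values for main effect A, main effect B and
% the AxB interaction. The variable name tbl_twoway is just for illustration.
[p_twoway,tbl_twoway] = anovan(Y,{A,B},'model','interaction',...
    'varnames',{'A','B'},'display','off');
disp(tbl_twoway) % compare with F and p computed above for each setting of chooseFactor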
