<?xml version="1.0" encoding="utf-8"?><!DOCTYPE article  PUBLIC '-//OASIS//DTD DocBook XML V4.4//EN'  'http://www.docbook.org/xml/4.4/docbookx.dtd'><article><articleinfo><title>COGNESTIC2024</title><revhistory><revision><revnumber>134</revnumber><date>2024-09-27 12:06:10</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>133</revnumber><date>2024-09-25 16:34:21</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>132</revnumber><date>2024-09-25 16:33:29</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>131</revnumber><date>2024-09-23 12:46:27</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>130</revnumber><date>2024-09-17 15:09:07</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>129</revnumber><date>2024-09-17 10:52:08</date><authorinitials>KshipraGurunandan</authorinitials></revision><revision><revnumber>128</revnumber><date>2024-09-17 10:41:21</date><authorinitials>KshipraGurunandan</authorinitials></revision><revision><revnumber>127</revnumber><date>2024-09-17 09:51:12</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>126</revnumber><date>2024-09-17 09:35:19</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>125</revnumber><date>2024-09-17 09:28:32</date><authorinitials>DaceApsvalka</authorinitials></revision><revision><revnumber>124</revnumber><date>2024-09-17 09:25:40</date><authorinitials>DaceApsvalka</authorinitials></revision><revision><revnumber>123</revnumber><date>2024-09-09 20:04:08</date><authorinitials>DaceApsvalka</authorinitials></revision><revision><revnumber>122</revnumber><date>2024-09-09 15:39:15</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>121</revnumber><date>2024-09-09 15:30:37</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>120</revnumber><date>2024-09-09 
15:28:57</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>119</revnumber><date>2024-08-01 15:08:14</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>118</revnumber><date>2024-07-30 16:07:19</date><authorinitials>RikHenson</authorinitials></revision><revision><revnumber>117</revnumber><date>2024-07-24 16:45:03</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>116</revnumber><date>2024-07-24 16:15:55</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>115</revnumber><date>2024-07-19 10:21:25</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>114</revnumber><date>2024-07-19 10:20:44</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>113</revnumber><date>2024-07-19 10:18:28</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>112</revnumber><date>2024-07-18 15:53:25</date><authorinitials>KshipraGurunandan</authorinitials></revision><revision><revnumber>111</revnumber><date>2024-07-18 10:04:07</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>110</revnumber><date>2024-07-18 10:03:25</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>109</revnumber><date>2024-07-17 16:11:19</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>108</revnumber><date>2024-07-17 16:10:42</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>107</revnumber><date>2024-07-17 15:41:56</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>106</revnumber><date>2024-07-17 15:41:48</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>105</revnumber><date>2024-07-17 15:38:16</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>104</revnumber><date>2024-07-17 
15:35:33</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>103</revnumber><date>2024-07-17 15:31:59</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>102</revnumber><date>2024-07-17 15:29:18</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>101</revnumber><date>2024-07-17 15:23:36</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>100</revnumber><date>2024-07-16 16:27:55</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>99</revnumber><date>2024-07-16 16:27:07</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>98</revnumber><date>2024-07-16 16:26:44</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>97</revnumber><date>2024-07-16 16:25:38</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>96</revnumber><date>2024-07-16 16:21:07</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>95</revnumber><date>2024-07-16 16:18:42</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>94</revnumber><date>2024-07-16 15:31:47</date><authorinitials>PetarRaykov</authorinitials></revision><revision><revnumber>93</revnumber><date>2024-07-15 16:38:34</date><authorinitials>DaceApsvalka</authorinitials></revision><revision><revnumber>92</revnumber><date>2024-07-15 16:32:39</date><authorinitials>DaceApsvalka</authorinitials></revision><revision><revnumber>91</revnumber><date>2024-07-15 16:29:09</date><authorinitials>DaceApsvalka</authorinitials></revision><revision><revnumber>90</revnumber><date>2024-07-15 15:28:21</date><authorinitials>DaceApsvalka</authorinitials></revision><revision><revnumber>89</revnumber><date>2024-07-15 15:26:31</date><authorinitials>DaceApsvalka</authorinitials></revision><revision><revnumber>88</revnumber><date>2024-07-15 
15:18:29</date><authorinitials>DaceApsvalka</authorinitials></revision><revision><revnumber>87</revnumber><date>2024-07-15 14:45:52</date><authorinitials>DaceApsvalka</authorinitials></revision><revision><revnumber>86</revnumber><date>2024-07-15 13:06:35</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>85</revnumber><date>2024-07-15 13:05:45</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>84</revnumber><date>2024-07-14 09:54:35</date><authorinitials>KshipraGurunandan</authorinitials></revision><revision><revnumber>83</revnumber><date>2024-07-12 17:11:19</date><authorinitials>KshipraGurunandan</authorinitials></revision><revision><revnumber>82</revnumber><date>2024-07-12 08:45:16</date><authorinitials>KshipraGurunandan</authorinitials></revision><revision><revnumber>81</revnumber><date>2024-07-12 08:40:45</date><authorinitials>KshipraGurunandan</authorinitials></revision><revision><revnumber>80</revnumber><date>2024-07-10 11:43:21</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>79</revnumber><date>2024-07-10 11:40:13</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>78</revnumber><date>2024-07-10 11:36:26</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>77</revnumber><date>2024-07-10 11:35:04</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>76</revnumber><date>2024-07-10 11:33:55</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>75</revnumber><date>2024-07-10 11:32:19</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>74</revnumber><date>2024-07-10 11:28:32</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>73</revnumber><date>2024-07-10 11:26:50</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>72</revnumber><date>2024-07-10 
11:22:15</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>71</revnumber><date>2024-07-04 10:57:17</date><authorinitials>MartaCorreia</authorinitials></revision><revision><revnumber>70</revnumber><date>2024-07-04 10:56:26</date><authorinitials>MartaCorreia</authorinitials></revision><revision><revnumber>69</revnumber><date>2024-07-04 10:54:32</date><authorinitials>MartaCorreia</authorinitials></revision><revision><revnumber>68</revnumber><date>2024-07-04 10:51:51</date><authorinitials>MartaCorreia</authorinitials></revision><revision><revnumber>67</revnumber><date>2024-07-04 09:15:20</date><authorinitials>MartaCorreia</authorinitials></revision><revision><revnumber>66</revnumber><date>2024-07-04 09:14:39</date><authorinitials>MartaCorreia</authorinitials></revision><revision><revnumber>65</revnumber><date>2024-07-04 09:07:48</date><authorinitials>MartaCorreia</authorinitials></revision><revision><revnumber>64</revnumber><date>2024-07-04 09:05:36</date><authorinitials>MartaCorreia</authorinitials></revision><revision><revnumber>63</revnumber><date>2024-07-04 09:04:39</date><authorinitials>MartaCorreia</authorinitials></revision><revision><revnumber>62</revnumber><date>2024-07-04 08:54:21</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>61</revnumber><date>2024-07-04 08:52:43</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>60</revnumber><date>2024-07-04 08:51:41</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>59</revnumber><date>2024-07-03 09:45:53</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>58</revnumber><date>2024-07-03 09:34:59</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>57</revnumber><date>2024-07-03 09:33:39</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>56</revnumber><date>2024-07-03 
09:32:13</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>55</revnumber><date>2024-07-03 09:30:12</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>54</revnumber><date>2024-07-03 09:28:44</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>53</revnumber><date>2024-07-03 09:27:55</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>52</revnumber><date>2024-07-03 09:11:03</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>51</revnumber><date>2024-07-03 09:07:19</date><authorinitials>DannyMitchell2</authorinitials></revision><revision><revnumber>50</revnumber><date>2024-07-01 09:21:25</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>49</revnumber><date>2024-07-01 09:10:46</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>48</revnumber><date>2024-06-12 15:24:05</date><authorinitials>RikHenson</authorinitials></revision><revision><revnumber>47</revnumber><date>2024-06-12 15:20:43</date><authorinitials>RikHenson</authorinitials></revision><revision><revnumber>46</revnumber><date>2024-06-12 15:10:40</date><authorinitials>RikHenson</authorinitials></revision><revision><revnumber>45</revnumber><date>2024-06-12 13:58:01</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>44</revnumber><date>2024-06-12 13:57:07</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>43</revnumber><date>2024-06-12 13:55:32</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>42</revnumber><date>2024-06-12 13:54:46</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>41</revnumber><date>2024-06-12 13:52:51</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>40</revnumber><date>2024-06-12 
13:43:15</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>39</revnumber><date>2024-06-12 13:42:17</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>38</revnumber><date>2024-06-12 13:38:56</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>37</revnumber><date>2024-06-12 13:35:38</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>36</revnumber><date>2024-06-12 13:29:41</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>35</revnumber><date>2024-06-12 12:30:29</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>34</revnumber><date>2024-06-12 12:28:49</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>33</revnumber><date>2024-06-12 12:27:49</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>32</revnumber><date>2024-06-12 12:26:16</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>31</revnumber><date>2024-06-12 12:24:44</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>30</revnumber><date>2024-06-12 12:22:08</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>29</revnumber><date>2024-06-12 12:19:53</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>28</revnumber><date>2024-06-12 11:27:04</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>27</revnumber><date>2024-06-12 11:23:41</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>26</revnumber><date>2024-06-11 16:06:43</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>25</revnumber><date>2024-06-11 16:04:33</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>24</revnumber><date>2024-06-11 
16:01:40</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>23</revnumber><date>2024-06-11 15:59:35</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>22</revnumber><date>2024-06-10 16:00:32</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>21</revnumber><date>2024-06-10 15:58:27</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>20</revnumber><date>2024-06-10 15:56:49</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>19</revnumber><date>2024-06-10 15:55:37</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>18</revnumber><date>2024-06-10 15:52:37</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>17</revnumber><date>2024-06-10 15:51:02</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>16</revnumber><date>2024-06-10 15:34:20</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>15</revnumber><date>2024-06-10 15:29:27</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>14</revnumber><date>2024-06-10 15:25:02</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>13</revnumber><date>2024-06-10 15:21:05</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>12</revnumber><date>2024-06-10 15:20:15</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>11</revnumber><date>2024-06-10 14:19:21</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>10</revnumber><date>2024-06-10 14:18:49</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>9</revnumber><date>2024-06-10 14:18:05</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>8</revnumber><date>2024-06-10 
13:58:30</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>7</revnumber><date>2024-06-10 13:55:30</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>6</revnumber><date>2024-06-10 13:51:46</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>5</revnumber><date>2024-06-10 13:51:00</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>4</revnumber><date>2024-06-10 13:50:15</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>3</revnumber><date>2024-06-10 13:47:02</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>2</revnumber><date>2024-06-10 13:44:04</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>1</revnumber><date>2024-06-10 13:38:02</date><authorinitials>OlafHauk</authorinitials></revision></revhistory></articleinfo><section><title>Course Material for COGNESTIC 2024</title><para>The Cognitive Neuroimaging Skills Training In Cambridge (COGNESTIC) is a 2-week  course that provides researchers with training in state-of-the-art methods for reproducible and open neuroimaging analysis and related methods. You can find more information on the <ulink url="https://www.mrc-cbu.cam.ac.uk/events/cognestic-2024/">COGNESTIC webpage</ulink>. </para><section><title>Software Installation Instructions</title><para>Attendees must read and follow these <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=COGNESTIC+Preparation.pdf">pre-course preparations</ulink>. </para></section><section><title>Essential Preparation Materials</title><para>You will find the course easier if you can study as much of the material below in advance (e.g., many of the videos below give the theory to the examples we will work through in the course). 
This section contains essential viewing; a second section contains less critical background, but which you might nonetheless find useful. </para><para> <anchor id="openscience"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Background to Open Science</emphasis> </para><para> Rik Henson </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://youtu.be/kTVtc7kjVQg">Open Cognitive Neuroscience</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="pythonprimer"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Primer on Python</emphasis> </para><para> Kshipra Gurunandan </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://colab.research.google.com/github/cs231n/cs231n.github.io/blob/master/python-colab.ipynb">Introduction to Python and notebooks</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="structuralmri"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Structural MRI I and II - VBM and surface-based 
analysis</emphasis></para><para> Marta Correia </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://youtu.be/Psh-GovQLiI">Introduction to MRI Physics and image contrast</ulink> </para><para> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=IntroductionToMRIPhysics.pdf">Slides</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="diffusionmri1"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Diffusion MRI  I - Preprocessing, model fitting and group analysis</emphasis> </para><para> Marta Correia </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://youtu.be/stpmlzO7b6c">Introduction to Diffusion MRI - Part I</ulink> </para><para> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=IntroductionToDiffusionMRI_I.pdf">Slides</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="diffusionmri2"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Diffusion MRI  II - Tractography and the anatomical connectome</emphasis> </para><para> Marta Correia </para></entry></row><row rowsep="1"><entry colsep="1" 
rowsep="1"><para><emphasis role="underline">Viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://youtu.be/QDJJ6G2ZouA">Introduction to Diffusion MRI - Part II</ulink> </para><para> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=IntroductionToDiffusionMRI_II.pdf">Slides</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="fmri1"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">fMRI I - Data Organisation</emphasis> </para><para> Dace Apšvalka </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para>Viewing </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://youtu.be/OuRdQJMU5ro">fMRI Data Structure &amp; Terminology</ulink> (6:47)</para><para><ulink url="https://youtu.be/5H6XaJLp2V8?si=39BLjouIy8aUaEo7">Brain imaging data structure</ulink> (11:07) </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="fmri2"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">fMRI II - Preprocessing</emphasis> </para><para> Dace Apšvalka </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para>Viewing </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://youtu.be/7Kk_RsGycHs">fMRI Artifacts and Noise</ulink> (11:57) </para><para> <ulink url="https://youtu.be/Qc3rRaJWOc4">Pre-processing I</ulink> (10:17) </para><para> <ulink url="https://youtu.be/qamRGWSC-6g">Pre-processing 
II</ulink> (7:42) </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="fmri3"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">fMRI III - Analysis</emphasis> </para><para> Dace Apšvalka </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para>Viewing </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.youtube.com/watch?v=OyLKMb9FNhg">GLM applied to fMRI</ulink> (11:21) </para><para> <ulink url="https://www.youtube.com/watch?v=7MibM1ATai4">Model Building – conditions and contrasts</ulink> (11:48) </para><para> <ulink url="https://www.youtube.com/watch?v=DEtwsFdFwYc">Model Building - nuisance variables</ulink> (13:58) </para><para> <ulink url="https://youtu.be/AalIM9-5-Pk">Multiple Comparisons</ulink> (9:03) </para><para> <ulink url="https://youtu.be/__cOYPifDWk">Group-level Analysis I</ulink> (7:05) </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="connectivityfmri"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">fMRI Connectivity</emphasis> </para><para> Petar Raykov </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.youtube.com/watch?v=SqyNPbsgHNQ&amp;ab_channel=PetarRaykov">Functional Connectivity in fMRI</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="networks"/> </para><informaltable><tgroup cols="3"><colspec
colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Network Analysis</emphasis> </para><para> Rik Henson </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.youtube.com/watch?v=H2q3fPxiuvw">Introduction to Network Neuroscience</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="eegmeg1"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">EEG/MEG I – Measurement and Pre-processing</emphasis> </para><para> Olaf Hauk </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para>1. <ulink url="https://www.youtube.com/watch?v=S24QG_n6KXk&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=1">Overview of EEG/MEG data processing from raw data to source estimates</ulink> </para><para>Event-related  paradigm, sample dataset, power spectrum, pre-processing, artefact  correction, epoching and averaging, visualization, source estimation.</para><para>  2. 
<ulink url="https://www.youtube.com/watch?v=GGDc6qZoDZ4&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=2&amp;pp=iAQB">The generation of EEG/MEG signals</ulink> </para><para>Dipole sources, volume currents, sensor types (EEG, magnetometers, gradiometers) and their leadfields.</para><para>3. <ulink url="https://www.youtube.com/watch?v=fLAoRsB2MF8&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=5&amp;pp=iAQB">Frequency and temporal filtering of EEG/MEG data</ulink></para><para>Frequency  spectrum, temporal smoothing, relationship between frequency and time  domain, filters (low-/high-/band-pass, Notch), aliasing, decibels. </para><para> 4. <ulink url="https://www.youtube.com/watch?v=OZFiYeIR2Xk&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=7&amp;pp=iAQB">Differential sensitivity of EEG and MEG</ulink> </para><para>Volume conduction, sensor types and their leadfields, sensitivity maps, dipoles vs spatially extended sources. </para><para> 5. <ulink url="https://www.youtube.com/watch?v=DYOnFu2Cuyw&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=16">Event-related potentials and fields</ulink>  </para><para>Averaging, evoked and induced activity, number of trials, artefact rejection, parametric designs, regression. </para><para> For more on this topic see <link linkend="eegmeg1b">here.</link> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="eegmeg2"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">EEG/MEG II  – Head Modelling and Source Estimation</emphasis> </para><para> Olaf Hauk </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para>1. 
<ulink url="https://www.youtube.com/watch?v=duhU5nOsAoc&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=8&amp;pp=iAQB">The EEG/MEG forward model</ulink></para><para>Basic  formulation of the EEG/MEG forward problem, linear equation, basics of  head modelling, examples of sensory evoked responses.</para><para>  2. <ulink url="https://www.youtube.com/watch?v=KlRJ5kpT3eA&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=11&amp;pp=iAQB">The EEG/MEG inverse problem</ulink></para><para>Non-uniqueness, under-determinedness, examples of non-uniqueness, source estimates for sensorily evoked activity. </para><para> 3. <ulink url="https://www.youtube.com/watch?v=X4EZCGPvI1k&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=12&amp;pp=iAQB">The spatial resolution of linear EEG/MEG source estimation</ulink></para><para>Leakage  and blurring, resolution matrix, point-spread functions (PSFs),  cross-talk functions (CTFs), examples of PSFs and CTFs,  regions-of-interest for source estimation.</para><para> 4. <ulink url="http://www.youtube.com/watch?v=XgYev3N1rR0&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=14&amp;pp=iAQB">Noise and regularisation in EEG/MEG source estimates</ulink> </para><para>Over- and under-fitting, smoothing, regularisation parameter, data whitening, noise covariance matrix. 
</para><para> For more on this topic see <link linkend="eegmeg2b">here.</link> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="eegmeg3"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">EEG/MEG III – Time-Frequency and Functional Connectivity </emphasis><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Analysis </emphasis> </para><para> Olaf Hauk </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para>1. <ulink url="https://www.youtube.com/watch?v=N4Pm1_C8hlA&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=18&amp;pp=iAQB">Frequency spectra and the Fourier analysis</ulink> </para><para> Periodic basis functions, Fourier Decomposition, frequency spectrum, Nyquist Theorem, steady state response. </para><para> 2. <ulink url="https://www.youtube.com/watch?v=ac0LbTm1Eb8&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=19&amp;pp=iAQB">Time-frequency analysis and wavelets</ulink> </para><para>Fourier  analysis, wavelets, trade-off between time and frequency resolution,  wavelets, number of cycles, evoked and induced activity, beta bursts. </para><para> 3. <ulink url="https://www.youtube.com/watch?v=omWqJ8xD2gs&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=20&amp;pp=iAQB">The basics of functional connectivity methods</ulink> </para><para>Types  of connectivity, amplitude envelope correlation, resting state  analysis, Hilbert envelope, phase-locking, coherence, SNR bias,  time-resolved connectivity. 
</para><para> For more on this topic see <link linkend="eegmeg3b">here.</link> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="eegmeg4"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">EEG/MEG IV – Further Topics and BIDS</emphasis> </para><para> Olaf Hauk &amp; Máté Aller </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para>1. <ulink url="https://www.youtube.com/watch?v=sW2i5sZC0zA&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=22&amp;pp=iAQB">Primer on group statistics for EEG/MEG data</ulink></para><para>Regions-of-interest (ROI) analysis, multiple comparison problem, cluster-based permutation tests, problems estimating cluster extent, MNE-Python tutorial.</para><para> 2. <ulink url="https://www.youtube.com/watch?v=08_VgAlVjIg&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=23&amp;pp=iAQB">Primer on decoding and RSA with EEG/MEG data</ulink></para><para>Basics of linear decoding, temporal generalisation, interpreting decoding weights, back-projection, representational similarity analysis (RSA).</para><para> 3. <ulink url="https://www.youtube.com/watch?v=95WZzPGXJes&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=24&amp;pp=iAQB">Primer on multimodal integration</ulink> </para><para> Types of neural “activity”, differential sensitivity of EEG/MEG vs fMRI, source weighting and priors, estimating deep sources with EEG/MEG. 
</para><para> For more on this topic see <link linkend="eegmeg4b">here.</link> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="rsa1"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">MVPA/RSA I and II</emphasis> </para><para> Daniel Mitchell &amp; Máté Aller </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para>Excellent presentations from Martin Hebart's MVPA course, on:</para><para><ulink url="https://fmrif.nimh.nih.gov/course/mvpa_course/2017/02_lecture1">Introduction to MVPA</ulink></para><para><ulink url="https://fmrif.nimh.nih.gov/course/mvpa_course/2017/03_lecture2">Introduction to classification</ulink>. </para><para> If the links don't work, download from <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2023?action=AttachFile&amp;do=view&amp;target=02_lecture1_MVPA_intro.mp4">here</ulink> and <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2023?action=AttachFile&amp;do=view&amp;target=03_lecture2_Classification.mp4">here</ulink>. </para><para> <ulink url="https://fmrif.nimh.nih.gov/course/mvpa_course/2017/08_lecture6">Martin Hebart's lecture on RSA</ulink>. If the link fails, download from <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2023?action=AttachFile&amp;do=view&amp;target=08_lecture6_RSA.mp4">here</ulink>. 
</para><para> <ulink url="https://www.youtube.com/watch?v=08_VgAlVjIg&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=23">Primer on decoding and RSA with EEG/MEG data</ulink> </para></entry></row></tbody></tgroup></informaltable></section><section><title>Additional Extra</title><para>If you want additional background, consider some of the below: </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Background to Open Science</emphasis> </para><para> Rik Henson </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Websites</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://osf.io/">OSF</ulink> </para><para> <ulink url="https://www.ukrn.org/primers/">UKRN</ulink> </para><para> <ulink url="https://bids.neuroimaging.io/">BIDS</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Reading</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://doi.org/10.1038/s41562-016-0021">Munafo et al, 2017, problems in science</ulink> </para><para> <ulink url="https://doi.org/10.1038/nrn3475">Button et al, 2013, power in neuroscience</ulink> </para><para> <ulink url="https://doi.org/10.1038/nrn.2016.167">Poldrack et al, 2017, reproducible neuroimaging</ulink> </para><para> <ulink url="https://doi.org/10.1038/s41586-022-04492-9">Marek et al, 2022, power in neuroimaging association studies</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.youtube.com/watch?v=D0VKyjNGvrs">Statistical power in neuroimaging</ulink> </para><para> <ulink 
url="https://www.youtube.com/watch?v=zAzTR8eq20k">PayWall: open access</ulink> </para><para> <ulink url="https://www.facebook.com/LastWeekTonight/videos/896755337120143">Comedian's Perspective on science and media</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Slides</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=COGNESTIC_OpenCogNeuro.pdf">Open Science Talk Slides</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="pythonprimer"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0" colwidth="10*"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Primer on Python</emphasis> </para><para> Kshipra Gurunandan </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Software</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.python.org/">Python</ulink>, <ulink url="https://pandas.pydata.org/">Pandas</ulink>, <ulink url="https://numpy.org/">NumPy</ulink>, <ulink url="https://matplotlib.org/">Matplotlib</ulink>, <ulink url="https://seaborn.pydata.org/">Seaborn</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Datasets</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://openneuro.org/datasets/ds000117/versions/1.0.5">Wakeman Multimodal</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Useful references</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.w3schools.com/python/default.asp">Python concepts with 
examples</ulink>, <ulink url="https://quickref.me/python.html">Quick reference</ulink>, <ulink url="https://blog.finxter.com/python-cheat-sheets/">Cheatsheets</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Slides and scripts</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=Primer+on+Python.pdf">Slides</ulink> <ulink url="https://github.com/MRC-CBU/COGNESTIC/tree/main/01_Primer_on_Python">Notebooks and HTMLs</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="structuralmri"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0" colwidth="10*"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Structural MRI I - Voxel-based morphometry</emphasis><emphasis role="strong"> </emphasis></para><para> Marta Correia </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Software</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/">FSL</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Suggested reading</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=IntroductionToGLM.pdf">Introduction to GLM for structural MRI analysis</ulink> </para><para> <ulink url="https://pubmed.ncbi.nlm.nih.gov/11525331/">Good et al, 2001, A VBM study of ageing</ulink> </para><para> <ulink url="https://pubmed.ncbi.nlm.nih.gov/15501092/">Smith et al, 2004, Structural MRI analysis in 
FSL</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="structuralmri2"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0" colwidth="10*"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Structural MRI II - Surface-based analyses</emphasis><emphasis role="strong"> </emphasis></para><para> Marta Correia </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Software</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://surfer.nmr.mgh.harvard.edu/">FreeSurfer</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Suggested reading</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://pubmed.ncbi.nlm.nih.gov/9931268/">Dale et al, 1999, Cortical surface-based analysis I</ulink> </para><para> <ulink url="https://pubmed.ncbi.nlm.nih.gov/9931269/">Fischl et al, 1999, Cortical surface-based analysis II</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Suggested viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.youtube.com/watch?v=6eJMxh7PlOY">Using the command line</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="diffusionmri1"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0" colwidth="10*"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Diffusion MRI  I - Preprocessing, Model Fitting and Group Analysis </emphasis></para><para> Marta Correia </para></entry></row><row 
rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Software</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://dipy.org/">dipy</ulink>, <ulink url="https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/">FSL</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Suggested reading</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FDT">FSL Diffusion Toolbox Wiki</ulink> </para><para> <ulink url="https://doi.org/10.1371/journal.pbio.1002203">Le Bihan et al, 2015, What water tells us about biological tissues</ulink> </para><para> <ulink url="https://doi.org/10.3389/fnins.2013.00031">Soares et al, 2013, A short guide to Diffusion Tensor Imaging</ulink> </para><para> <ulink url="https://pubmed.ncbi.nlm.nih.gov/16624579/">Smith et al, 2006, Tract-based spatial statistics (TBSS)</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="diffusionmri2"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0" colwidth="10*"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Diffusion MRI  II - Tractography and the Anatomical Connectome</emphasis> </para><para> Marta Correia </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Software</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://dipy.org/">dipy</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Suggested reading</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.sciencedirect.com/science/article/pii/B9780123964601000196">MR Diffusion Tractography</ulink> 
</para></entry></row></tbody></tgroup></informaltable><para> <anchor id="fmri1extra"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0" colwidth="10*"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">fMRI I - Data Organisation</emphasis> </para><para> Dace Apšvalka </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para>Software </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://heudiconv.readthedocs.io/en/latest/">HeudiConv</ulink>, <ulink url="https://bids-standard.github.io/pybids/">PyBIDS</ulink>, <ulink url="https://nipy.org/nibabel/">NiBabel</ulink>, <ulink url="https://nilearn.github.io/stable/index.html">Nilearn</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para>Websites </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://bids.neuroimaging.io/">Brain Imaging Data Structure</ulink> </para><para> <ulink url="https://bids-standard.github.io/bids-starter-kit/">BIDS Starter Kit</ulink> </para><para> <ulink url="https://bids-specification.readthedocs.io/en/stable/">BIDS Specification v1.9.0</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para>Suggested reading </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.nature.com/articles/sdata201644">The brain imaging data structure (BIDS)</ulink>, Gorgolewski et al., 2016</para><para><ulink url="https://doi.org/10.1162/imag_a_00103">The past, present, and future of the brain imaging data structure (BIDS)</ulink>, Poldrack et al., 2024</para></entry></row></tbody></tgroup></informaltable><para> <anchor id="fmri2extra"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0" colwidth="10*"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" 
colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">fMRI II - Pre-processing</emphasis> </para><para> Dace Apšvalka </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para>Software<emphasis role="underline"> </emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://mriqc.readthedocs.io/en/latest/">MRIQC</ulink>, <ulink url="https://fmriprep.org/en/stable/">fMRIprep</ulink>, <ulink url="https://nipype.readthedocs.io/en/latest/">NiPype</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para>Suggested reading<emphasis role="underline"> </emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://link.springer.com/article/10.1007/s11065-015-9294-9">Functional Magnetic Resonance Imaging Methods</ulink>, Chen &amp; Glover, 2015 </para><para> <ulink url="https://doi.org/10.3389/fnimg.2022.1073734">Quality control in functional MRI studies with MRIQC and fMRIPrep</ulink>, Provins et al., 2023 </para><para> <ulink url="https://www.nature.com/articles/s41592-018-0235-4">fMRIPrep: a robust preprocessing pipeline for functional MRI</ulink>, Esteban et al., 2018 </para><para> <ulink url="https://doi.org/10.3389/fninf.2011.00013">Nipype: a flexible, lightweight and extensible neuroimaging data processing framework in Python</ulink>, Gorgolewski et al., 2011 </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="fmri3extra"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0" colwidth="10*"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">fMRI III - Analysis</emphasis> </para><para> Dace Apšvalka </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para>Software </para></entry><entry colsep="1" 
rowsep="1"><para><ulink url="http://nilearn.github.io/stable/index.html">Nilearn</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para>Suggested reading </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://doi.org/10.1214/09-STS282">The Statistical Analysis of fMRI Data</ulink>, Lindquist, 2008 </para><para> <ulink url="https://doi.org/10.1191/0962280203sm341ra">Controlling the familywise error rate in functional neuroimaging: a comparative review</ulink>, Nichols &amp; Hayasaka, 2003 </para><para> <ulink url="https://www.nature.com/articles/s41596-020-0327-3">Analysis of task-based functional MRI data preprocessed with fMRIPrep</ulink>, Esteban et al., 2020 </para><para> <ulink url="https://doi.org/10.1016/j.neuroimage.2007.11.048">Guidelines for reporting an fMRI study</ulink>, Poldrack et al., 2008 </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para>Suggested viewing </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.youtube.com/watch?v=YfeMIcDWwko">Model Building - temporal basis sets</ulink> (11:08)</para><para><ulink url="https://www.youtube.com/watch?v=Ab-5AbJ8gAs">GLM Estimation</ulink> (9:11)</para><para><ulink url="https://youtu.be/Mb9LDzvhecY">Noise Models- AR models</ulink> (9:57)</para><para><ulink url="https://youtu.be/NRunOo7EKD8">Inference- Contrasts and t-tests</ulink> (11:05)</para><para><ulink url="https://youtu.be/AalIM9-5-Pk">Multiple Comparisons</ulink> by Martin Lindquist and Tor Wager (9:03)</para><para><ulink url="https://youtu.be/MxQeEdVNihg">FWER Correction</ulink> (16:11)</para><para><ulink url="https://youtu.be/W9ogBO4GEzA">FDR Correction</ulink> (5:25)</para><para><ulink url="https://youtu.be/N7Iittt8HrU">More about multiple comparisons</ulink> (14:39) </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="connectivityfmri"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0" colwidth="10*"/><colspec 
colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">fMRI Connectivity</emphasis>  </para><para> Petar Raykov </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Software</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://nilearn.github.io/stable/index.html">Nilearn</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Datasets</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://nilearn.github.io/dev/modules/generated/nilearn.datasets.fetch_development_fmri.html">movie dataset</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Reading</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="http://dx.doi.org/10.1016/j.tics.2013.09.016">Resting-state functional Connectivity</ulink></para><para> <ulink url="https://doi.org/10.1016/j.neuroimage.2013.04.007">Learning and comparing functional connectomes across subjects</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.youtube.com/watch?v=SqyNPbsgHNQ&amp;ab_channel=PetarRaykov">fMRI Functional Connectivity in fMRI</ulink></para><para><ulink url="https://www.youtube.com/watch?v=1VOKsWWLgjk&amp;ab_channel=RikHenson&amp;t=15m10s">Overview of Effective Connectivity (not covered in person)</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Tutorial slides and scripts</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://github.com/ppraykov/FCCognestic2023">Functional Connectivity Nilearn 
Practical</ulink></para><para><ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=Multimodal_DCM_cognestic_tutorial_fMRI.pdf">DCM tutorial in SPM (not covered in-person)</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="networksb"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Brain Network Analysis</emphasis> </para><para> Rik Henson </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Software</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://pypi.org/project/bctpy/">Python 3.7+,</ulink> <ulink url="https://nxviz.readthedocs.io/en/latest/">nxviz</ulink>, <ulink url="https://python-louvain.readthedocs.io/en/latest/">python-louvain</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Datasets</emphasis> </para></entry><entry colsep="1" rowsep="1"/></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Reading</emphasis> </para></entry><entry colsep="1" rowsep="1"><para>- (Review article) Bullmore, E., Sporns, O. Complex brain networks: graph theoretical analysis of structural and functional systems. <emphasis>Nat Rev Neurosci</emphasis> <emphasis role="strong">10</emphasis>, 186–198 (2009). <ulink url="https://doi.org/10.1038/nrn2575"/> </para><para> - (Textbook reference for more information) Alex Fornito, Andrew Zalesky, and Edward Bullmore. <emphasis>Fundamentals of brain network analysis</emphasis>. Academic press, 2016. 
</para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.youtube.com/watch?v=HjSGqwAFRcc">Understanding your brain as a network and as art</ulink> by Prof. Dani Bassett. </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Slides</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://github.com/isebenius/COGNESTIC_network_analysis/tree/main">https://github.com/isebenius/COGNESTIC_network_analysis/</ulink> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=COGNESTIC23-presentation_Sebenius.pdf">Slides</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="eegmeg1b"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0" colwidth="10*"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">EEG/MEG I – Measurement and Pre-processing</emphasis> </para><para> Olaf Hauk </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Software and datasets</emphasis> </para></entry><entry colsep="1" rowsep="1"><para>This will be part of a download that will become available later.</para><para> <ulink url="https://mne.tools/stable/index.html">MNE-Python</ulink> software homepage </para><para> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=MNE_Installation_Instructions.pdf">MNE stand-alone installation instructions for COGNESTIC</ulink></para><para> <ulink 
url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=MNE-Python_datasets.ipynb">Jupyter script to download sample datasets in MNE-Python</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="strong">Essential</emphasis> and suggested viewing </para></entry><entry colsep="1" rowsep="1"><para><emphasis role="strong">0. <ulink url="https://www.youtube.com/watch?v=S24QG_n6KXk&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=1">Overview of EEG/MEG data processing from raw data to source estimates</ulink></emphasis> </para><para>Event-related paradigm, sample dataset, power spectrum, pre-processing, artefact correction, epoching and averaging, visualization, source estimation.</para><para> <emphasis role="strong">1. </emphasis><ulink url="https://www.youtube.com/watch?v=KQoR9uXLxTg&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=1">A brief history of timing</ulink></para><para> A brief overview of the history of bioelectromagnetism, EEG and MEG<emphasis role="strong">.</emphasis> </para><para> <emphasis role="strong">2. <ulink url="https://www.youtube.com/watch?v=GGDc6qZoDZ4&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=2&amp;pp=iAQB">The generation of EEG/MEG signals</ulink></emphasis> </para><para>Dipole sources, volume currents, sensor types (EEG, magnetometers, gradiometers) and their leadfields.</para><para><emphasis role="strong"> 3. </emphasis><ulink url="http://www.youtube.com/watch?v=tHzBtNQaoSI&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=3&amp;pp=iAQB">Basics of EEG/MEG artefact correction</ulink> </para><para> Physiological and non-physiological artefacts, data decompositions, frequency/temporal/spatial filtering. 
</para><para><emphasis role="strong">4.</emphasis> <emphasis role="strong"><ulink url="https://www.youtube.com/watch?v=fLAoRsB2MF8&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=5&amp;pp=iAQB">Frequency and temporal filtering of EEG/MEG data</ulink></emphasis></para><para><emphasis role="strong"> </emphasis>Frequency spectrum, temporal smoothing, relationship between frequency and time domain, filters (low-/high-/band-pass, Notch), aliasing, decibels. </para><para><emphasis role="strong">5.</emphasis> <ulink url="https://www.youtube.com/watch?v=mCvPlPlY9Og&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=7&amp;pp=iAQB">Topographical artefact correction of EEG/MEG data</ulink> </para><para>Independent Component Analysis (ICA), Signal Space Projection (SSP), eye movement and heart beat artefacts.</para><para><emphasis role="strong">6.</emphasis> <ulink url="https://www.youtube.com/watch?v=liMV6hm_uEs&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=5&amp;pp=iAQB">Maxfiltering of MEG data</ulink></para><para>  Signal Space Separation, options of Maxfilter software (e.g. movement compensation).</para><para> <emphasis role="strong">7. <ulink url="https://www.youtube.com/watch?v=OZFiYeIR2Xk&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=7&amp;pp=iAQB">Differential sensitivity of EEG and MEG</ulink></emphasis> </para><para>Volume conduction, sensor types and their leadfields, sensitivity maps, dipoles vs spatially extended sources. 
</para><para> <emphasis role="strong">8.</emphasis> <emphasis role="strong"><ulink url="https://www.youtube.com/watch?v=DYOnFu2Cuyw&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=16">Event-related potentials and fields</ulink></emphasis>  </para><para>Averaging, evoked and induced activity, number of trials, artefact rejection, parametric designs, regression.</para><para> <ulink url="https://www.youtube.com/watch?v=Bmt89hHyxuM">+ Origin, significance, and interpretation of EEG</ulink> (Michael X Cohen) </para><para><ulink url="https://www.youtube.com/watch?v=z0JlHS9kulA">+ Analysing MEG data with MNE-Python and its ecosystem</ulink> (Alex Gramfort)</para><para> <ulink url="https://www.youtube.com/playlist?list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5">+ List of EEG/MEG lectures</ulink></para><para> MNE-Python tutorials:</para><para><ulink url="http://mne.tools/stable/auto_tutorials/intro/10_overview.html#sphx-glr-auto-tutorials-intro-10-overview-py">Overview of MNE-Python processing pipeline from preprocessing to source estimation</ulink></para><para> <ulink url="https://mne.tools/stable/auto_tutorials/preprocessing/index.html">Preprocessing</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Suggested reading</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://pubmed.ncbi.nlm.nih.gov/25128257/">Digital Filtering</ulink> </para><para><ulink url="https://www.sciencedirect.com/science/article/pii/S0896627319301746">Filtering How To</ulink> </para><para> <ulink url="https://iopscience.iop.org/article/10.1088/0031-9155/51/7/008">Maxwell Filtering</ulink> </para><para> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=General+EEGMEG+Literature.pdf">General EEG/MEG Literature</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para>Slides and scripts<emphasis role="underline"> 
</emphasis> </para></entry><entry colsep="1" rowsep="1"><para>Slides: <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=EMEG1_1_Measurement.pdf">1</ulink> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=EMEG1_2_Preprocessing.pdf">2</ulink> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=EMEG1_3_Averaging.pdf">3</ulink> <ulink url="https://github.com/olafhauk/COGNESTIC2024scripts/">Scripts</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="eegmeg2b"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0" colwidth="10*"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">EEG/MEG II  – Head Modelling and Source Estimation</emphasis> </para><para> Olaf Hauk </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Software and datasets</emphasis> </para></entry><entry colsep="1" rowsep="1"><para>This will be part of a download that will become available later.</para><para> <ulink url="https://mne.tools/stable/index.html">MNE-Python</ulink> software homepage </para><para> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=MNE_Installation_Instructions.pdf">MNE stand-alone installation instructions for COGNESTIC</ulink></para><para> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=MNE-Python_datasets.ipynb">Jupyter script to download sample datasets in MNE-Python</ulink> </para></entry></row><row rowsep="1"><entry 
colsep="1" rowsep="1"><para><emphasis role="strong">Essential</emphasis> and suggested viewing </para></entry><entry colsep="1" rowsep="1"><para><emphasis role="strong">0. <ulink url="https://www.youtube.com/watch?v=S24QG_n6KXk&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=1">Overview of EEG/MEG data processing from raw data to source estimates</ulink></emphasis> </para><para>Event-related  paradigm, sample dataset, power spectrum, pre-processing, artefact  correction, epoching and averaging, visualization, source estimation.</para><para> <emphasis role="strong">1. <ulink url="https://www.youtube.com/watch?v=duhU5nOsAoc&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=8&amp;pp=iAQB">The EEG/MEG forward model</ulink></emphasis></para><para>Basic formulation of the EEG/MEG forward problem, linear equation, basics of head modelling, examples of sensory evoked responses.<emphasis role="strong"> </emphasis></para><para> <emphasis role="strong">2.</emphasis> <ulink url="https://www.youtube.com/watch?v=BsvKPknaSNo&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=9&amp;pp=iAQB">Source spaces for EEG/MEG source estimation</ulink></para><para> Cortical surface, volumetric source space, spatial sampling, spatial normalisation, subcortical areas, source orientation. </para><para> <emphasis role="strong">3.</emphasis> <ulink url="https://www.youtube.com/watch?v=259MhTSCVMg&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=10&amp;pp=iAQB">Head models for EEG/MEG source estimation</ulink> Volume conduction, Boundary Element Method (BEM), Finite Element Method (FEM), head model accuracy.  </para><para> <emphasis role="strong">4. 
<ulink url="https://www.youtube.com/watch?v=KlRJ5kpT3eA&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=11&amp;pp=iAQB">The EEG/MEG inverse problem</ulink></emphasis></para><para>Non-uniqueness, under-determinedness, examples of non-uniqueness, source estimates for sensory evoked activity<emphasis role="strong">. </emphasis></para><para> <emphasis role="strong">5. <ulink url="https://www.youtube.com/watch?v=X4EZCGPvI1k&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=12&amp;pp=iAQB">The spatial resolution of linear EEG/MEG source estimation</ulink></emphasis></para><para><emphasis role="strong"> </emphasis>Leakage and blurring, resolution matrix, point-spread functions (PSFs), cross-talk functions (CTFs), examples of PSFs and CTFs, regions-of-interest for source estimation.<emphasis role="strong"> </emphasis></para><para> <emphasis role="strong">6. </emphasis><ulink url="https://www.youtube.com/watch?v=OyXzuo6gKcg&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=13&amp;pp=iAQB">Comparison of spatial resolution for linear EEG/MEG source estimation methods</ulink> </para><para>Point-spread functions (PSFs), cross-talk functions (CTFs), resolution metrics (localisation error, spatial deviation), combination of EEG and MEG, PSFs and CTFs for minimum-norm type methods and beamformers, comparison of resolution metrics for minimum-norm type methods and beamformers. 
</para><para> <emphasis role="strong">7.</emphasis> <emphasis role="strong"><ulink url="http://www.youtube.com/watch?v=XgYev3N1rR0&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=14&amp;pp=iAQB">Noise and regularisation in EEG/MEG source estimates</ulink> </emphasis></para><para>Over- and under-fitting, smoothing, regularisation parameter, data whitening, noise covariance matrix.<emphasis role="strong"> </emphasis></para><para> + <ulink url="https://www.youtube.com/playlist?list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5">List of EEG/MEG lectures</ulink></para><para> MNE-Python Tutorials: </para><para> <ulink url="https://mne.tools/stable/auto_tutorials/forward/index.html">Forward Models and Source Spaces</ulink></para><para> <ulink url="https://mne.tools/stable/auto_tutorials/inverse/index.html">Source Estimation</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Suggested reading</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://pubmed.ncbi.nlm.nih.gov/35390459/">Linear source estimation and spatial resolution</ulink></para><para> <ulink url="https://pubmed.ncbi.nlm.nih.gov/24434678/">Comparison of common head models</ulink> (e.g. BEM)</para><para> <ulink url="https://pubmed.ncbi.nlm.nih.gov/24971512/">Guidelines for head modelling</ulink> (incl. 
FEM)</para><para> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=General+EEGMEG+Literature.pdf">General EEG/MEG Literature</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para>Slides and scripts<emphasis role="underline"> </emphasis> </para></entry><entry colsep="1" rowsep="1"><para>Slides: <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=EMEG2_1_ForwardModelling.pdf">1</ulink> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=EMEG2_2_MNE.pdf">2</ulink> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=EMEG2_3_SpatialResolution.pdf">3</ulink> <ulink url="https://github.com/olafhauk/COGNESTIC2024scripts/">Scripts</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="eegmeg3b"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0" colwidth="10*"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">EEG/MEG III – Time-Frequency and Functional Connectivity </emphasis><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Analysis </emphasis> </para><para> Olaf Hauk </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Software and datasets</emphasis> </para></entry><entry colsep="1" rowsep="1"><para>This will be part of a download that will become available later.</para><para> <ulink url="https://mne.tools/stable/index.html">MNE-Python</ulink> software homepage </para><para> <ulink 
url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=MNE_Installation_Instructions.pdf">MNE stand-alone installation instructions for COGNESTIC</ulink></para><para> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=MNE-Python_datasets.ipynb">Jupyter script to download sample datasets in MNE-Python</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="strong">Essential</emphasis> and suggested viewing </para></entry><entry colsep="1" rowsep="1"><para><emphasis role="strong">1.</emphasis> <ulink url="https://www.youtube.com/watch?v=zl3tyPLuUm8&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=17&amp;pp=iAQB">The basics of signals in the frequency domain</ulink> </para><para>Oscillations, periodic signals, sine and cosine, polar representation, complex numbers. </para><para> <emphasis role="strong">2. </emphasis> <emphasis role="strong"><ulink url="https://www.youtube.com/watch?v=N4Pm1_C8hlA&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=18&amp;pp=iAQB">Frequency spectra and the Fourier analysis</ulink></emphasis> </para><para> Periodic basis functions, Fourier Decomposition, frequency spectrum, Nyquist Theorem, steady state response. </para><para> <emphasis role="strong">3. </emphasis> <emphasis role="strong"><ulink url="https://www.youtube.com/watch?v=ac0LbTm1Eb8&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=19&amp;pp=iAQB">Time-frequency analysis and wavelets</ulink></emphasis> </para><para>Fourier analysis, wavelets, trade-off between time and frequency resolution, number of cycles, evoked and induced activity, beta bursts. 
</para><para> <emphasis role="strong">4.</emphasis> <emphasis role="strong"><ulink url="https://www.youtube.com/watch?v=omWqJ8xD2gs&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=20&amp;pp=iAQB">The basics of functional connectivity methods</ulink></emphasis> </para><para>Types of connectivity, amplitude envelope correlation, resting state analysis, Hilbert envelope, phase-locking, coherence, SNR bias, time-resolved connectivity.  </para><para><emphasis role="strong">5. </emphasis><ulink url="https://www.youtube.com/watch?v=gqm2RAz9I8A&amp;list=PLp67eqWCj2f_DBsCMkIOBpBbLWGAUKtu5&amp;index=21&amp;pp=iAQB">Spatial resolution (leakage) and connectivity</ulink></para><para>Connectivity in sensor and source space, point-spread and cross-talk, (non-)zero-lag signals, orthogonalisation, imaginary part of coherency, source space parcellations. </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Suggested reading</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://pubmed.ncbi.nlm.nih.gov/26778976/">Tutorial on Functional Connectivity</ulink></para><para> <ulink url="https://mitpress.mit.edu/books/analyzing-neural-time-series-data">Analyzing Neural Time Series Data</ulink></para><para> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=General+EEGMEG+Literature.pdf">General EEG/MEG Literature</ulink><emphasis role="underline"> </emphasis> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para>Slides and scripts </para></entry><entry colsep="1" rowsep="1"><para>Slides: <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=EMEG3_1_TimeFrequency.pdf">1</ulink> <ulink 
url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=EMEG3_2_FunctionalConnectivity.pdf">2</ulink> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=EMEG3_3_AdvancedFunctionalConnectivity.pdf">3</ulink> <ulink url="https://github.com/olafhauk/COGNESTIC2024scripts/">Scripts</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="eegmeg4b"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0" colwidth="10*"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">EEG/MEG IV – Statistics and BIDS</emphasis> </para><para> Olaf Hauk &amp; Máté Aller </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Software</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://mne.tools/stable/index.html">MNE-Python</ulink></para><para> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=MNE_Installation_Instructions.pdf">MNE Installation for Cognestic</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Datasets</emphasis> </para></entry><entry colsep="1" rowsep="1"><para>Sample dataset in MNE-Python. 
<ulink url="https://mne.tools/stable/auto_tutorials/time-freq/index.html">Tutorials</ulink></para><para> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=MNE_Installation_Instructions.pdf">MNE Installation for Cognestic</ulink></para><para> <ulink url="https://openneuro.org/datasets/ds000248/versions/1.2.4">M/EEG combined dataset</ulink> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=MNE-Python_datasets.ipynb">Download Datasets</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Suggested reading</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.pnas.org/doi/10.1073/pnas.1705414114">Estimating subcortical sources from EEG/MEG</ulink></para><para> <ulink url="https://mne.tools/mne-bids/stable/auto_examples/convert_mne_sample.html">Tutorial on converting MEG data to BIDS format</ulink></para><para> <ulink url="https://mne.tools/mne-bids-pipeline/1.4/examples/ds000248_base.html">Example using MNE-BIDS-Pipeline for processing combined M/EEG data</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Suggested viewing</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.youtube.com/watch?v=F0Ex9s-GZyg">Talk on Multimodal Integration</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Slides and scripts</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=EEGMEG4-advanced.zip">Notebooks</ulink> <ulink 
url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=Exercises_EEGMEG.pdf">Exercises</ulink> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=EMEG4_1_Stats.pdf">Slides1</ulink> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=EMEG4_2_Multimodal.pdf">Slides2</ulink></para><para> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=Notebooks_mne_bids_pipeline.zip">Notebooks mne-bids-pipeline</ulink> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=mne-bids-pipeline_cognestic.pdf">Slides mne-bids-pipeline</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="rsa1"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0" colwidth="12*"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">MVPA/RSA I</emphasis><emphasis role="strong"> </emphasis></para><para> Daniel Mitchell </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Software</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.python.org/">Python 3.7+</ulink>, including numpy, matplotlib, nilearn &amp; <ulink url="https://scikit-learn.org/stable/">scikit-learn</ulink>. </para><para> (To visualise MRI data, you can use your software of choice, although for nifti format data you might like to consider <ulink url="https://www.nitrc.org/projects/mricron">MRIcroN</ulink> or <ulink url="https://www.nitrc.org/projects/mricrogl">MRIcroGL</ulink>.) 
</para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Datasets</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://openneuro.org/datasets/ds003965/versions/1.0.0">&quot;NI-edu-data-minimal&quot; faces dataset</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Reading</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://academic.oup.com/scan/article/4/1/101/1613450">Mur et al. (2009) Revealing representational content with pattern-information fMRI--an introductory guide</ulink></para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Slides and scripts </emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://github.com/MRC-CBU/COGNESTIC/tree/main/09_MVPA_MRI">Notebooks and slides are on the COGNESTIC github</ulink> </para></entry></row></tbody></tgroup></informaltable><para> <anchor id="rsa2"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0" colwidth="12*"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">MVPA/RSA II</emphasis><emphasis role="strong"> </emphasis></para><para> Daniel Mitchell &amp; Máté Aller </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Software</emphasis> </para></entry><entry colsep="1" rowsep="1"><para>Python implementation of the RSA Toolbox: <ulink url="https://github.com/rsagroup/rsatoolbox">Version 3.0</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Datasets</emphasis> </para></entry><entry colsep="1" rowsep="1"><para>Example data included with RSA toolbox </para></entry></row><row rowsep="1"><entry colsep="1" 
rowsep="1"><para><emphasis role="underline">Reading</emphasis> </para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://www.frontiersin.org/articles/10.3389/neuro.06.004.2008/full">Kriegeskorte et al. (2008) Representational similarity analysis - connecting the branches of systems neuroscience</ulink></para><para><ulink url="https://www.cell.com/trends/cognitive-sciences/fulltext/S1364-6613(13)00127-7">Kriegeskorte &amp; Kievit (2013) Representational geometry: integrating cognition, computation, and the brain</ulink> </para><para><ulink url="https://journals.plos.org/ploscompbiol/article?id=10.1371/journal.pcbi.1003553">Nili et al. (2014) A toolbox for representational similarity analysis</ulink></para><para> <ulink url="https://elifesciences.org/articles/82566">Schutt et al. (2023) Statistical inference on representational geometries</ulink></para><para>EEG/MEG: </para><para> <ulink url="https://pubmed.ncbi.nlm.nih.gov/27779910/">Tutorial on EEG/MEG decoding</ulink></para><para> <ulink url="https://www.sciencedirect.com/science/article/pii/S1364661314000199">Temporal Generalization</ulink> <ulink url="https://www.sciencedirect.com/science/article/pii/S1053811913010914">Interpretation of Weight Vectors</ulink> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Slides and scripts</emphasis> </para></entry><entry colsep="1" rowsep="1"><para>We will demo the RSA toolbox using the jupyter notebooks in the &quot;demos&quot; folder of the toolbox, also available, along with the slides, on the COGNESTIC <ulink url="https://github.com/MRC-CBU/COGNESTIC/tree/main/09_MVPA_MRI">github</ulink>. 
</para><para><ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=EEGMEG5-decoding.zip">EEGMEG Notebooks</ulink> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=EMEG5_Decoding.pdf">EEG/MEG Slides</ulink></para></entry></row></tbody></tgroup></informaltable><para> <anchor id="stimulation"/> </para><informaltable><tgroup cols="3"><colspec colname="col_0"/><colspec colname="col_1"/><colspec colname="col_2"/><tbody><row rowsep="1"><entry align="center" colsep="1" nameend="col_2" namest="col_0" rowsep="1"><para><!--"~+bigger+~" is not applicable to DocBook--><emphasis role="strong">Brain Stimulation</emphasis><emphasis role="strong"> </emphasis></para><para> Elizabeth Michael &amp; Ajay Halai </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para><emphasis role="underline">Reading</emphasis> </para></entry><entry colsep="1" rowsep="1"><para>TMS-EEG: </para><para> <ulink url="https://doi.org/10.1016/j.neuroimage.2016.10.031"/> </para><para> <ulink url="https://doi.org/10.1016/j.xpro.2022.101435"/> </para><para> <ulink url="https://pressrelease.brainproducts.com/tms-eeg/"/> </para><para> TMS-fMRI: </para><para> <ulink url="https://doi.org/10.31234/osf.io/9fyxb"/> </para><para> <ulink url="https://doi.org/10.1101/2021.05.28.446111"/> </para></entry></row><row rowsep="1"><entry colsep="1" rowsep="1"><para>Slides</para></entry><entry colsep="1" rowsep="1"><para><ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=BrainStimSession2024_2.pdf">General</ulink> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=cognestic_TMSEEG.pdf">TMS+EEG</ulink> <ulink 
url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=TMS_FMRI_COGNESTIC_ASSEM.pdf">TMS+fMRI</ulink> <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/COGNESTIC2024/methods/COGNESTIC2024?action=AttachFile&amp;do=get&amp;target=TMSfMRIArtifacts_V1_prt_nn.pdf">TMS+fMRI_Artefacts</ulink></para></entry></row></tbody></tgroup></informaltable><!--rule (<hr>) is not applicable to DocBook--></section></section></article>