<?xml version="1.0" encoding="utf-8"?><!DOCTYPE article  PUBLIC '-//OASIS//DTD DocBook XML V4.4//EN'  'http://www.docbook.org/xml/4.4/docbookx.dtd'><article><articleinfo><title>OpenDatasets</title><revhistory><revision><revnumber>52</revnumber><date>2022-11-06 13:16:23</date><authorinitials>RussellThompson</authorinitials></revision><revision><revnumber>51</revnumber><date>2022-03-22 13:17:17</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>50</revnumber><date>2022-03-22 13:16:50</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>49</revnumber><date>2022-02-01 14:39:01</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>48</revnumber><date>2022-02-01 14:37:32</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>47</revnumber><date>2022-02-01 14:36:47</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>46</revnumber><date>2021-02-15 13:54:18</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>45</revnumber><date>2021-02-11 14:33:11</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>44</revnumber><date>2021-02-11 14:30:56</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>43</revnumber><date>2021-02-11 14:15:58</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>42</revnumber><date>2021-02-11 14:15:14</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>41</revnumber><date>2021-02-11 14:13:39</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>40</revnumber><date>2021-01-30 16:07:09</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>39</revnumber><date>2021-01-28 15:20:05</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>38</revnumber><date>2021-01-28 
15:13:49</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>37</revnumber><date>2021-01-28 15:13:30</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>36</revnumber><date>2021-01-28 15:09:52</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>35</revnumber><date>2021-01-28 15:08:04</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>34</revnumber><date>2020-06-24 13:43:22</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>33</revnumber><date>2020-06-24 13:42:57</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>32</revnumber><date>2020-06-15 16:08:30</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>31</revnumber><date>2020-06-15 16:01:19</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>30</revnumber><date>2020-06-15 16:00:55</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>29</revnumber><date>2020-06-15 16:00:28</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>28</revnumber><date>2020-06-02 16:22:04</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>27</revnumber><date>2020-06-02 16:21:32</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>26</revnumber><date>2020-06-02 16:19:33</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>25</revnumber><date>2020-06-02 16:15:27</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>24</revnumber><date>2020-06-02 16:14:47</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>23</revnumber><date>2020-06-02 16:12:26</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>22</revnumber><date>2020-06-02 
16:09:51</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>21</revnumber><date>2020-06-02 16:08:53</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>20</revnumber><date>2020-06-02 13:30:43</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>19</revnumber><date>2020-06-02 12:10:53</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>18</revnumber><date>2020-06-02 12:05:25</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>17</revnumber><date>2020-06-02 12:04:50</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>16</revnumber><date>2020-06-02 12:01:01</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>15</revnumber><date>2020-06-02 11:57:09</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>14</revnumber><date>2020-06-02 11:43:14</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>13</revnumber><date>2020-06-02 11:40:16</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>12</revnumber><date>2020-06-02 11:39:06</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>11</revnumber><date>2020-06-02 11:37:36</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>10</revnumber><date>2020-06-02 11:32:55</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>9</revnumber><date>2020-06-02 11:32:21</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>8</revnumber><date>2020-06-02 11:28:24</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>7</revnumber><date>2020-06-02 11:26:28</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>6</revnumber><date>2020-06-02 
11:24:21</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>5</revnumber><date>2020-06-02 11:23:29</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>4</revnumber><date>2020-06-02 11:22:28</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>3</revnumber><date>2020-06-02 11:18:17</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>2</revnumber><date>2020-06-02 11:16:09</date><authorinitials>OlafHauk</authorinitials></revision><revision><revnumber>1</revnumber><date>2020-06-02 11:13:04</date><authorinitials>OlafHauk</authorinitials></revision></revhistory></articleinfo><section><title>List of openly available datasets</title><para>This is a collection of links to openly available datasets. There is a separate page with <ulink url="http://imaging.mrc-cbu.cam.ac.uk/methods/stimdatabases">links to other databases</ulink> (e.g. for stimulus generation and evaluation).  </para><para> If you would like to add something, please contact <code>&lt;Olaf DOT Hauk AT mrc-cbu DOT cam DOT ac DOT uk&gt;</code> or <code>&lt;Johan DOT Carlin AT mrc-cbu DOT cam DOT ac DOT uk&gt;</code>. </para><para> You must contact our IT department (<code>&lt;it-help AT mrc-cbu DOT cam DOT ac DOT uk&gt;</code>) before downloading any large datasets at the CBU. Some open datasets may already be <emphasis role="strong"><ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/OpenDatasetsCBU">available at the CBU</ulink></emphasis>.  </para><para> Note that some datasets require registration or licensing before you are allowed to access them. 
</para><section><title>(f)MRI and Multimodal (+EEG/MEG)</title><para><emphasis role="strong"><ulink url="https://openneuro.org/">OpenNEURO</ulink></emphasis> (free and open platform for sharing MRI, MEG, EEG, iEEG, and ECoG data) </para><para>(formerly <emphasis role="strong"><ulink url="https://openfmri.org/">OpenfMRI</ulink></emphasis>, now deprecated) </para><para><emphasis role="strong"><ulink url="https://en.wikipedia.org/wiki/List_of_neuroscience_databases">Wikipedia</ulink></emphasis> (list of neuroscience databases) </para><para><emphasis role="strong"><ulink url="http://www.cam-can.org/">Cam-CAN</ulink></emphasis> (Cambridge Centre for Ageing and Neuroscience large-scale data set). <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/OpenDatasetsCBU">Available at CBU</ulink> </para><para><emphasis role="strong"><ulink url="http://www.nature.com/articles/sdata20151">Multimodal, multi-subject data set</ulink></emphasis> (EMEG and (f)MRI, famous/unfamiliar/scrambled faces). <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/OpenDatasetsCBU">Available at CBU</ulink> </para><para><emphasis role="strong"><ulink url="https://data.donders.ru.nl/collections/di/dccn/DSC_3011020.09_236?0">Mother of unification studies</ulink></emphasis> (204-subject EEG/MEG/fMRI dataset to study language processing). <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/OpenDatasetsCBU">Available at CBU</ulink> </para><para><emphasis role="strong"><ulink url="https://www.humanconnectome.org/">Human Connectome Project</ulink></emphasis> (&quot;unparalleled compilation of neural data&quot;). <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/OpenDatasetsCBU">Available at CBU</ulink> </para><para><emphasis role="strong"><ulink url="https://babyconnectomeproject.org/">Human Connectome Project - Baby Connectome Project</ulink></emphasis> (&quot;data from 500 participants ranging in age from birth to 5 years of age&quot;). 
<ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/OpenDatasetsCBU">Available at CBU</ulink> </para><para><emphasis role="strong"><ulink url="https://www.ukbiobank.ac.uk/">UK Biobank</ulink></emphasis> (huge medical database including neuroimaging data). <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/OpenDatasetsCBU">Available at CBU</ulink> </para><para><emphasis role="strong"><ulink url="https://www.biorxiv.org/content/10.1101/2020.06.16.155317v1/">Amsterdam Open MRI Collection</ulink></emphasis> (A set of multimodal MRI datasets for individual difference analyses) </para><para><emphasis role="strong"><ulink url="https://www.oasis-brains.org/#about">OASIS</ulink></emphasis> (longitudinal neuroimaging, clinical, cognitive, and biomarker dataset for normal aging and Alzheimer’s Disease) </para><para><emphasis role="strong"><ulink url="http://cocomac.g-node.org/main/index.php?">CoCoMac Database</ulink></emphasis>, Collations of Connectivity data on the Macaque brain </para><para><emphasis role="strong"><ulink url="http://neurosynth.org/">Neurosynth</ulink></emphasis> (automated &quot;meta-analysis&quot; of fMRI data) </para><para><emphasis role="strong"><ulink url="http://www.studyforrest.org/">Studyforrest</ulink></emphasis> (publicly available neuroimaging data) </para><para><emphasis role="strong"><ulink url="http://naturalscenesdataset.org/">Natural Scenes Dataset</ulink></emphasis> (7T fMRI dataset on perception and memory of scenes) </para><para><emphasis role="strong"><ulink url="https://github.com/Conxz/multiBrain">multiBrain</ulink></emphasis> (A list of brain imaging datasets with multiple scans per subject.) 
</para><para><emphasis role="strong"><ulink url="https://www.neuroscience.ox.ac.uk/publications/465809">Enhancing NeuroImaging Genetics through Meta-Analysis (ENIGMA)  Consortium</ulink></emphasis> is a collaborative network of researchers working together on  a range of large-scale studies that integrate data from 70 institutions  worldwide. </para></section><section><title>EEG/MEG</title><para>Note: Some datasets may be part of multimodal studies listed above. </para><para><emphasis role="strong"><ulink url="https://meguk.ac.uk/database/">MEG-UK</ulink></emphasis> MEG-UK Partnership multi-site MEG data. <ulink url="https://imaging.mrc-cbu.cam.ac.uk/methods/OpenDatasetsCBU">Available at CBU</ulink> </para><para><emphasis role="strong"><ulink url="https://erpinfo.org/erp-core">ERP-Core</ulink></emphasis>  (The ERP CORE provides stimulus presentation scripts, data from 40  neurotypical young adults, and EEGLAB/ERPLAB processing scripts for 6  optimized ERP paradigms.) </para><para><emphasis role="strong"><ulink url="https://sccn.ucsd.edu/~arno/fam2data/publicly_available_EEG_data.html">List of EEG/ERP data sets</ulink></emphasis> openly available for download. </para><para><emphasis role="strong"><ulink url="https://www.biorxiv.org/content/10.1101/2022.03.15.484473v1">large and rich EEG dataset for modeling human visual object recognition</ulink></emphasis> (64 EEG channels, 10 participants, each with 82,160 trials spanning 16,740 image conditions) </para><para><emphasis role="strong"><ulink url="https://osf.io/72b89/">Kilo-word ERP database</ulink></emphasis>:  960 words were presented to 75 participants in a go/no-go lexical  decision task while recording event-related potentials (ERPs), see <ulink url="https://www.google.com/url?sa=t&amp;rct=j&amp;q=&amp;esrc=s&amp;source=web&amp;cd=&amp;ved=2ahUKEwjknKjy9b7uAhVkrHEKHURXAjIQFjADegQIBhAC&amp;url=https://hal.archives-ouvertes.fr/hal-01432349/document&amp;usg=AOvVaw1JFi-nO9s693Q4Y9_VT1BO">publication</ulink>. 
</para><para><emphasis role="strong"><ulink url="https://pubmed.ncbi.nlm.nih.gov/22068921/">MEG-Sim</ulink></emphasis> (simulated MEG data) </para><para><emphasis role="strong"><ulink url="http://nemo.nic.uoregon.edu/wiki/NEMO">Neural ElectroMagnetic Ontologies</ulink></emphasis> (NEMO, resources to support representation, classification, and meta-analysis of brain electromagnetic data) </para></section><section><title>Behavioural</title><para>Some links to language-related databases on the <emphasis role="strong"><ulink url="http://imaging.mrc-cbu.cam.ac.uk/language/Databases">Language Group Wiki</ulink></emphasis> </para><para><emphasis role="strong"><ulink url="https://www.ldc.upenn.edu/">Linguistic Data Consortium</ulink></emphasis> (a collection of databases etc., see e.g. their <ulink url="http://www.ldc.upenn.edu/Catalog/">Catalog page</ulink>). </para><para><emphasis role="strong"><ulink url="http://elexicon.wustl.edu/">English Lexicon Project</ulink></emphasis> (orth/phon/lex information on English words, plus Lexical Decision and Reading Latencies, HAL Frequencies etc.) 
</para><para><emphasis role="strong"><ulink url="http://www.natcorp.ox.ac.uk/">British National Corpus</ulink></emphasis> (English words and phrases) </para><para><emphasis role="strong"><ulink url="http://live.ece.utexas.edu/research/doves/">Database of Visual Eye Movements</ulink></emphasis> (DOVES, collection of eye movements from 29 human observers as they viewed 101 natural calibrated images) </para><para><emphasis role="strong"><ulink url="https://www.nature.com/articles/sdata2016126/">Eye movements during viewing of complex images</ulink></emphasis> </para><para><emphasis role="strong"><ulink url="https://www.sr-research.com/eye-tracking-blog/resources/data-repositories-eye-tracking-research/">Data repositories for eye-tracking research</ulink></emphasis> </para><para><emphasis role="strong"><ulink url="https://arxiv.org/abs/1905.03702">OpenEDS</ulink></emphasis> (large-scale dataset of eye-images captured using virtual reality) </para><para><emphasis role="strong"><ulink url="https://osf.io/82p6x/">ComPlex</ulink></emphasis> eye-movement database of compound word reading in English </para></section></section></article>