-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathself.bib
986 lines (913 loc) · 58.4 KB
/
self.bib
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
% fix: page range must use double hyphen (104--108); names normalized to comma form
@inproceedings{Louthan2009,
  author    = {Louthan, George and
               McMillan, Collin and
               Johnson, Christopher and
               Hale, John},
  title     = {Toward Robust and Extensible Automatic Protocol Identification},
  booktitle = {International Conference on Internet Computing},
  year      = {2009},
  pages     = {104--108},
}
% fix: page range must use double hyphen (271--278); names normalized to comma form
@inproceedings{Roberts2010,
  author    = {Roberts, Warren and
               Johnson, Christopher and
               Hale, John},
  title     = {Transparent Emergency Data Destruction},
  booktitle = {The 5th International Conference on Information-Warfare \& Security},
  year      = {2010},
  pages     = {271--278},
}
@presentation{JohnsonYazdanbakhsh2012,
  author       = {Christopher J. Johnson and Arash Yazdanbakhsh},
  title        = {A minimal model of motion tuning in middle temporal visual cortex},
  type         = {poster},
  organization = {16th International Conference on Cognitive and Neural Systems},
  address      = {Boston, MA},
  month        = may,
  year         = {2012},
}
@presentation{JohnsonBohland2012,
  author       = {Christopher J. Johnson and
                  Partha P. Mitra and
                  Jason W. Bohland},
  title        = {The Online Brain Atlas Reconciliation Tool (OBART): A
                  web application for MRI atlas exploration and multi-atlas
                  labeling},
  type         = {poster},
  organization = {Society for Neuroscience 2012 Annual Meeting},
  address      = {New Orleans, LA},
  month        = oct,
  year         = {2012},
}
@presentation{Johnson2013,
  author       = {Christopher J. Johnson},
  title        = {Localizing Neural Representations of Speech Sounds},
  type         = {oral},
  organization = {Second CELEST Workshop on Adaptive Brain-Computer
                  Interactions},
  address      = {Boston, MA},
  month        = jun,
  year         = {2013},
}
% fix: booktitle was a verbatim duplicate of organization (export junk) -- removed
@presentation{JohnsonBohland2014a,
  author       = {Christopher J. Johnson and Jason W. Bohland},
  title        = {Localizing Speech Sound Representations in a Syllable
                  Repetition Task},
  type         = {poster},
  organization = {6th Annual Inter-Science of Learning Conference},
  address      = {Pittsburgh, PA},
  month        = feb,
  year         = {2014},
  key          = {1},
}
% fix: bare "\dag" token broke BibTeX name parsing -- braced onto the surname;
% booktitle duplicated organization verbatim -- removed
@presentation{JohnsonBohland2014b,
  author       = {Christopher J. Johnson and Jason W. {\dag}Bohland},
  title        = {Mapping the cortical representation of speech sounds during
                  syllable repetition},
  type         = {poster},
  organization = {Society for the Neurobiology of Language Annual Meeting},
  address      = {Amsterdam, NL},
  month        = aug,
  year         = {2014},
  key          = {1},
}
@presentation{Johnson2014,
  author       = {Christopher J. Johnson and Jason W. Bohland},
  title        = {Localizing categorical speech representations in perception
                  and production},
  type         = {oral},
  booktitle    = {2014 Neuroscience Meeting Planner},
  organization = {Society for Neuroscience},
  note         = {Program No. 204.09},
  address      = {Washington, DC},
  month        = nov,
  year         = {2014},
  key          = {1},
}
@presentation{Markiewicz2015,
  author       = {Christopher J. Markiewicz and Jason W. Bohland},
  title        = {Localizing categorical speech representations in
                  perception and production},
  type         = {poster},
  organization = {Neural Processing in Humans, Animals, and Man},
  address      = {Boston, MA},
  month        = jun,
  year         = {2015},
}
% fix: author field mixed "First Last" and "Last, First" forms, and the bare
% "\dag" before "Bohland, Jason W." corrupted name parsing -- normalized to
% comma form with the dagger braced onto the surname
@presentation{Markiewicz2016a,
  author       = {Markiewicz, Christopher J. and
                  Kroshian, Garen S. and
                  You, Jacqueline and
                  {\dag}Bohland, Jason W.},
  title        = {Multivariate analysis of input and output representations in
                  speech},
  type         = {poster},
  organization = {Organization for Human Brain Mapping Annual Meeting},
  address      = {Geneva},
  month        = jun,
  year         = {2016},
}
@presentation{Markiewicz2016b,
  author       = {Christopher J. Markiewicz},
  title        = {Multivariate pattern analysis of input and output
                  representations of speech},
  type         = {oral},
  organization = {Boston Speech Motor Control Working Group},
  address      = {Boston, MA},
  month        = dec,
  year         = {2016},
}
% fix: issn lacked its hyphen (NeuroImage is 1053-8119); author field mixed
% "First Last" and "Last, First" forms -- normalized
@article{Markiewicz2016,
  author  = {Markiewicz, Christopher J. and Bohland, Jason W.},
  title   = {Mapping the cortical representation of speech sounds in a syllable repetition task},
  journal = {NeuroImage},
  volume  = {141},
  pages   = {174--190},
  month   = nov,
  year    = {2016},
  issn    = {1053-8119},
  doi     = {10.1016/j.neuroimage.2016.07.023},
}
@presentation{Markiewicz2017,
  author       = {Christopher J. Markiewicz},
  title        = {Using Python for neuroimaging},
  type         = {oral},
  organization = {Hands-on Reproducible and Scalable Brain Imaging Analysis with Nipype},
  address      = {Cambridge, MA},
  month        = mar,
  year         = {2017},
}
@presentation{Markiewicz2018software,
  author       = {Christopher J. Markiewicz},
  title        = {FMRIprep: Building a Robust Preprocessing Pipeline for fMRI},
  type         = {Software demonstration},
  organization = {Organization for Human Brain Mapping Annual Meeting},
  address      = {Singapore},
  month        = jun,
  year         = {2018},
}
@presentation{Markiewicz2018poster,
  author       = {Markiewicz, Christopher J and
                  Esteban, Oscar and
                  Blair, Ross W and
                  Ma, Feilong and
                  Kent, James D and
                  Heinsfeld, Anibal S and
                  Goncalves, Mathias and
                  Poldrack, Russell A and
                  Gorgolewski, Krzysztof J},
  title        = {FMRIprep: Building a Robust Preprocessing Pipeline for fMRI},
  type         = {poster},
  organization = {Organization for Human Brain Mapping Annual Meeting},
  address      = {Singapore},
  month        = jun,
  year         = {2018},
}
@presentation{Yarkoni2018,
  author       = {Yarkoni, Tal and
                  {de la Vega}, Alejandro and
                  DuPre, Elizabeth and
                  Esteban, Oscar and
                  Halchenko, Yarik and
                  Hanke, Michael and
                  Hayot-Sasson, Valerie and
                  Ivanov, Alexander and
                  Kiar, Greg and
                  Markiewicz, Chris and
                  McNamara, Quinten and
                  Petrov, Dmitry and
                  Salo, Taylor and
                  Nielson, Dylan and
                  Poline, Jean-Baptiste and
                  Poldrack, Russell and
                  Gorgolewski, Krzysztof J},
  title        = {Pybids: Python tools for manipulation and analysis of BIDS datasets},
  type         = {poster},
  organization = {Organization for Human Brain Mapping Annual Meeting},
  address      = {Singapore},
  month        = jun,
  year         = {2018},
}
% fix: the "isbn" field held Nature Methods' ISSN (1548-7105) -- renamed to
% issn; export-junk field "type = {JOUR}" removed (meaningless for @article)
@article{Esteban2018,
  author  = {Esteban, Oscar and
             Markiewicz, Christopher J. and
             Blair, Ross W. and
             Moodie, Craig A. and
             Isik, A. Ilkay and
             Erramuzpe, Asier and
             Kent, James D. and
             Goncalves, Mathias and
             DuPre, Elizabeth and
             Snyder, Madeleine and
             Oya, Hiroyuki and
             Ghosh, Satrajit S. and
             Wright, Jessey and
             Durnez, Joke and
             Poldrack, Russell A. and
             Gorgolewski, Krzysztof J.},
  title   = {fMRIPrep: a robust preprocessing pipeline for functional MRI},
  journal = {Nature Methods},
  month   = dec,
  year    = {2018},
  issn    = {1548-7105},
  doi     = {10.1038/s41592-018-0235-4},
  note    = {Preprint: \doi{10.1101/306951}},
}
% NOTE(review): auto-exported entry kept byte-identical. The author casing
% "De La Vega" differs from the braced "{de la Vega}" form used elsewhere in
% this file, and the url duplicates the doi -- confirm preferred forms before
% normalizing.
@article{Esteban2020a,
title = {Analysis of task-based functional {MRI} data preprocessed with {fMRIPrep}},
volume = {15},
copyright = {2020 The Author(s), under exclusive licence to Springer Nature Limited},
issn = {1750-2799},
url = {http://www.nature.com/articles/s41596-020-0327-3},
doi = {10.1038/s41596-020-0327-3},
abstract = {Functional magnetic resonance imaging (fMRI) is a standard tool to investigate the neural correlates of cognition. fMRI noninvasively measures brain activity, allowing identification of patterns evoked by tasks performed during scanning. Despite the long history of this technique, the idiosyncrasies of each dataset have led to the use of ad-hoc preprocessing protocols customized for nearly every different study. This approach is time consuming, error prone and unsuitable for combining datasets from many sources. Here we showcase fMRIPrep (http://fmriprep.org), a robust tool to prepare human fMRI data for statistical analysis. This software instrument addresses the reproducibility concerns of the established protocols for fMRI preprocessing. By leveraging the Brain Imaging Data Structure to standardize both the input datasets (MRI data as stored by the scanner) and the outputs (data ready for modeling and analysis), fMRIPrep is capable of preprocessing a diversity of datasets without manual intervention. In support of the growing popularity of fMRIPrep, this protocol describes how to integrate the tool in a task-based fMRI investigation workflow.},
language = {en},
number = {7},
urldate = {2022-01-11},
journal = {Nature Protocols},
author = {Esteban, Oscar and Ciric, Rastko and Finc, Karolina and Blair, Ross W. and Markiewicz, Christopher J. and Moodie, Craig A. and Kent, James D. and Goncalves, Mathias and DuPre, Elizabeth and Gomez, Daniel E. P. and Ye, Zhifang and Salo, Taylor and Valabregue, Romain and Amlien, Inge K. and Liem, Franziskus and Jacoby, Nir and Stojić, Hrvoje and Cieslak, Matthew and Urchs, Sebastian and Halchenko, Yaroslav O. and Ghosh, Satrajit S. and De La Vega, Alejandro and Yarkoni, Tal and Wright, Jessey and Thompson, William H. and Poldrack, Russell A. and Gorgolewski, Krzysztof J.},
month = jul,
year = {2020},
keywords = {Computational neuroscience, Magnetic resonance imaging, Neurological models, Software},
pages = {2186--2202},
note = {Preprint: \doi{10.1101/694364}},
}
% NOTE(review): bioRxiv preprint of Esteban2020a, kept byte-identical. The
% field names URL/eprint and the nonstandard elocation-id come from the
% bioRxiv export; consider citing the published version instead, or keeping
% both deliberately since Esteban2020a links this preprint in its note.
@article {Esteban2019,
author = {Esteban, Oscar and
Ciric, Rastko and
Finc, Karolina and
Blair, Ross and
Markiewicz, Christopher J. and
Moodie, Craig A. and
Kent, James D. and
Goncalves, Mathias and
DuPre, Elizabeth and
Gomez, Daniel E. P. and
Ye, Zhifang and
Salo, Taylor and
Valabregue, Romain and
Amlien, Inge K. and
Liem, Franziskus and
Jacoby, Nir and
Stoji{\'c}, Hrvoje and
Cieslak, Matthew and
Urchs, Sebastian and
Halchenko, Yaroslav O. and
Ghosh, Satrajit S. and
{de la Vega}, Alejandro and
Yarkoni, Tal and
Wright, Jessey and
Thompson, William H. and
Poldrack, Russell A. and
Gorgolewski, Krzysztof J.},
title = {Analysis of task-based functional MRI data preprocessed with fMRIPrep},
elocation-id = {694364},
year = {2019},
doi = {10.1101/694364},
abstract = {Functional magnetic resonance imaging (fMRI) is widely used to investigate the neural correlates of cognition. fMRI non-invasively measures brain activity, allowing identification of patterns evoked by tasks performed during scanning. Despite the long history of this technique, the idiosyncrasies of each dataset have led to the use of ad-hoc preprocessing protocols customized for nearly every different study. This approach is time-consuming, error-prone, and unsuitable for combining datasets from many sources. Here we showcase fMRIPrep, a robust preprocessing tool for virtually any human BOLD (blood-oxygen level dependent) fMRI dataset that addresses the reproducibility concerns of the established protocols for fMRI preprocessing. Based on standardizations of the input and output data specifications, fMRIPrep is capable of preprocessing a diversity of datasets without manual intervention. In support of the growing popularity of fMRIPrep, this protocol describes how to integrate the tool in a task-based fMRI investigation workflow.},
URL = {https://www.biorxiv.org/content/early/2019/07/08/694364},
eprint = {https://www.biorxiv.org/content/early/2019/07/08/694364.full.pdf},
journal = {bioRxiv}
}
% fix: export-junk field "type = {JOUR}" removed (meaningless for @article);
% accent rewritten as a BibTeX special character (Val{\'e}rie) for correct
% sorting under classic BibTeX
@article{Yarkoni2019,
  title     = {{PyBIDS}: Python tools for {BIDS} datasets},
  author    = {Yarkoni, Tal and
               Markiewicz, Christopher J and
               {de la Vega}, Alejandro and
               Gorgolewski, Krzysztof J and
               Salo, Taylor and
               Halchenko, Yarik and
               McNamara, Quinten and
               DeStasio, Krista and
               Poline, Jean-Baptiste and
               Petrov, Dmitry and
               Hayot-Sasson, Val{\'e}rie and
               Nielson, Dylan M and
               Carlin, Johan and
               Kiar, Greg and
               Whitaker, Kirstie and
               DuPre, Elizabeth and
               Wagner, Adina and
               Tirrell, Lee and
               Jas, Mainak and
               Hanke, Michael and
               Poldrack, Russell A and
               Esteban, Oscar and
               Appelhoff, Stefan and
               Holdgraf, Chris and
               Staden, Isla and
               Thirion, Bertrand and
               Kleinschmidt, Dave F and
               Lee, John A and
               {Visconti di Oleggio Castello}, Matteo and
               Notter, Michael P and
               Blair, Ross},
  journal   = {Journal of Open Source Software},
  publisher = {The Open Journal},
  volume    = {4},
  number    = {40},
  pages     = {1294},
  month     = aug,
  year      = {2019},
  doi       = {10.21105/joss.01294},
}
% fix: export-junk field "type = {JOUR}" removed (meaningless for @article)
@article{Poldrack2019,
  title   = {The importance of standards for sharing of computational models and data},
  author  = {Poldrack, Russell and
             Feingold, Franklin and
             Frank, Michael J and
             Gleeson, Padraig and
             de Hollander, Gilles and
             Huys, Quentin JM and
             Love, Bradley C and
             Markiewicz, Christopher J and
             Moran, Rosalyn and
             Ritter, Petra and
             Turner, Brandon M and
             Yarkoni, Tal and
             Zhan, Ming and
             Cohen, Jonathan D},
  journal = {Computational Brain \& Behavior},
  volume  = {2},
  number  = {3-4},
  pages   = {229},
  month   = dec,
  year    = {2019},
  doi     = {10.1007/s42113-019-00062-x},
  note    = {Preprint: \doi{10.31234/osf.io/q3rnx}},
}
@presentation{Markiewicz2019a,
  author       = {Christopher J. Markiewicz},
  title        = {FitLins - Reproducible model estimation for fMRI},
  type         = {Software demonstration},
  organization = {Organization for Human Brain Mapping Annual Meeting},
  address      = {Rome, Italy},
  month        = jun,
  year         = {2019},
}
@presentation{Markiewicz2019poster,
  author       = {Markiewicz, Christopher J and
                  {de la Vega}, Alejandro and
                  Yarkoni, Tal and
                  Poldrack, Russell A and
                  Gorgolewski, Krzysztof J},
  title        = {FitLins - Reproducible model estimation for fMRI},
  type         = {poster},
  organization = {Organization for Human Brain Mapping Annual Meeting},
  address      = {Rome, Italy},
  month        = jun,
  year         = {2019},
}
@presentation{Markiewicz2019b,
  author       = {Christopher J. Markiewicz},
  title        = {fMRIPrep: A Robust fMRI Preprocessing Pipeline},
  type         = {oral},
  organization = {Athinoula A. Martinos Center for Biomedical Imaging},
  address      = {Boston, MA},
  month        = oct,
  year         = {2019},
}
@presentation{Markiewicz2019c,
  author       = {Christopher J. Markiewicz},
  title        = {BIDS: The Brain Imaging Data Structure},
  type         = {oral},
  organization = {Athinoula A. Martinos Center for Biomedical Imaging},
  address      = {Boston, MA},
  month        = oct,
  year         = {2019},
}
@presentation{Markiewicz2019d,
  author       = {Christopher J. Markiewicz},
  title        = {BIDS Apps Metadata},
  type         = {oral},
  organization = {Making Open Neuroscience Infrastructure Interoperable (MONII 2.0) Workshop},
  address      = {Montreal, QC, Canada},
  month        = mar,
  year         = {2019},
  url          = {https://effigies.github.io/bids-metadata},
}
@presentation{Markiewicz2019e,
  author       = {Christopher J. Markiewicz},
  title        = {niflows - Reuse, Create, and Package your own Workflows},
  type         = {oral},
  organization = {Coastal Coding Workshop},
  address      = {Miami, FL},
  month        = jan,
  year         = {2019},
  url          = {https://effigies.github.io/niflows-intro},
}
@presentation{Markiewicz2019f,
  author       = {Christopher J. Markiewicz},
  title        = {fMRIPrep - A Robust Preprocessing Pipeline for Functional MRI},
  type         = {oral},
  organization = {Neurohackademy 2019},
  address      = {Seattle, WA},
  month        = aug,
  year         = {2019},
  url          = {https://effigies.github.io/fmriprep-demo},
}
% fix: page range used a single hyphen with stray spaces ({ 132 - 139 }) --
% now {132--139}; brace padding trimmed throughout (cosmetic, values unchanged)
@inproceedings{pydra-proc-scipy-2020,
  author    = {Jarecka, Dorota and Goncalves, Mathias and Markiewicz, Christopher J and Esteban, Oscar and Lo, Nicole and Kaczmarzyk, Jakub and Ghosh, Satrajit},
  title     = {Pydra - a flexible and lightweight dataflow engine for scientific analyses},
  booktitle = {Proceedings of the 19th {P}ython in {S}cience {C}onference},
  editor    = {{M}eghann {A}garwal and {C}hris {C}alloway and {D}illon {N}iederhut and {D}avid {S}hupe},
  pages     = {132--139},
  year      = {2020},
  doi       = {10.25080/Majora-342d178e-012},
}
@presentation{Markiewicz2020a,
  author       = {Christopher J. Markiewicz},
  title        = {Testing Scientific Software},
  type         = {oral},
  organization = {Nilearn Dev Days 2020},
  address      = {Online},
  month        = may,
  year         = {2020},
  url          = {https://effigies.github.io/testing-scientific-software/},
}
@presentation{Markiewicz2020b,
  author       = {Christopher J. Markiewicz},
  title        = {The BIDS Ecosystem},
  type         = {oral},
  organization = {Neuro Data Science},
  address      = {Montreal, QC, Canada (Online)},
  month        = may,
  year         = {2020},
  url          = {https://effigies.github.io/bids-ecosystem/},
}
@presentation{Markiewicz2020c,
  author       = {Christopher J. Markiewicz},
  title        = {BIDS Applications and Derivatives},
  type         = {oral},
  organization = {Open and Reproducible Neuroimaging},
  address      = {Oldenburg, Germany (Online)},
  month        = nov,
  year         = {2020},
  url          = {https://effigies.github.io/bids-ecosystem/},
}
% fix: url was just the DOI resolver for the doi field -- removed (prefer doi
% when both exist); ragged author-field line breaks reflowed one name per line
@presentation{Markiewicz2020d,
  title        = {BIDS Derivatives – Standardizing processing results of neuroimaging data},
  author       = {Markiewicz, Christopher J and
                  Appelhoff, Stefan and
                  Calhoun, Vince and
                  Dickie, Erin W and
                  Duff, Eugene and
                  DuPre, Elizabeth and
                  Esteban, Oscar and
                  Feingold, Franklin and
                  Ghosh, Satrajit and
                  Halchenko, Yaroslav O and
                  Harms, Michael P and
                  Herholz, Peer and
                  Mennes, Maarten and
                  Nørgaard, Martin and
                  Oostenveld, Robert and
                  Pernet, Cyril and
                  Pestilli, Franco and
                  Poldrack, Russell A and
                  Rokem, Ariel and
                  Smith, Robert E and
                  Yarkoni, Tal and
                  Gorgolewski, Krzysztof J},
  type         = {poster},
  organization = {Organization for Human Brain Mapping Annual Meeting},
  address      = {Online},
  month        = jun,
  year         = 2020,
  doi          = {10.5281/zenodo.3941041},
}
% fix: url was just the DOI resolver for the doi field -- removed (prefer doi
% when both exist)
@presentation{Markiewicz2020e,
  title        = {BIDS Derivatives – Standardizing processing results of neuroimaging data},
  author       = {Markiewicz, Christopher J and
                  Appelhoff, Stefan and
                  Calhoun, Vince and
                  Dickie, Erin W and
                  Duff, Eugene and
                  DuPre, Elizabeth and
                  Esteban, Oscar and
                  Feingold, Franklin and
                  Ghosh, Satrajit and
                  Halchenko, Yaroslav O and
                  Harms, Michael P and
                  Herholz, Peer and
                  Hermes, Dora and
                  Jas, Mainak and
                  Mennes, Maarten and
                  Nørgaard, Martin and
                  Oostenveld, Robert and
                  Pernet, Cyril and
                  Pestilli, Franco and
                  Poldrack, Russell A and
                  Rokem, Ariel and
                  Smith, Robert E and
                  Yarkoni, Tal and
                  Gorgolewski, Krzysztof J and
                  Niso, Guiomar},
  type         = {poster},
  organization = {LiveMEEG 2020},
  address      = {Online},
  month        = oct,
  year         = 2020,
  doi          = {10.5281/zenodo.3941041},
}
@presentation{Markiewicz2021NICC,
  author       = {Christopher J. Markiewicz},
  title        = {BIDS: The Brain Imaging Data Structure},
  type         = {oral},
  organization = {Laboratory for NeuroImaging of Coma and Consciousness},
  address      = {Boston, MA (Online)},
  month        = jan,
  year         = {2021},
}
% fix: "MONTH = Jun" referenced an undefined macro (standard month macros are
% lowercase: jun) and would expand to empty text; ALL-CAPS field names
% lowercased to match the rest of the file; whole-title double-bracing
% replaced with acronym-only protection; pages 1-3 -> 1--3
@presentation{Adon2021,
  author       = {Adon, R{\'e}mi and Appelhoff, Stefan and Auer, Tibor and Guillo, Laurent and Halchenko, Yaroslav O and Keator, David and Markiewicz, Christopher J and Nichols, Thomas E and Poline, Jean-Baptiste and Ghosh, Satrajit and Maumet, Camille},
  title        = {{BIDS}-prov: a provenance framework for {BIDS}},
  type         = {poster},
  organization = {Organization for Human Brain Mapping Annual Meeting},
  address      = {Seoul, South Korea (Online)},
  pages        = {1--3},
  month        = jun,
  year         = {2021},
  url          = {https://www.hal.inserm.fr/inserm-03478998},
  pdf          = {https://www.hal.inserm.fr/inserm-03478998/file/OHBM2021-Bids-prov.pdf},
}
% fix: "De la Vega, Alejandro" misparses (the lowercase "la" becomes a von
% part and splits the surname); braced as {de la Vega} per this file's
% convention in other entries
@presentation{Markiewicz2021poster,
  title        = {BIDS Statistical Models - An implementation-independent representation of General Linear Models},
  author       = {Markiewicz, Christopher J and Bottenhorn, Katherine L and Chen, Gang and {de la Vega}, Alejandro and Esteban, Oscar and Maumet, Camille and Nichols, Thomas E and Poldrack, Russell A and Poline, Jean-Baptiste and Yarkoni, Tal},
  type         = {poster},
  organization = {Organization for Human Brain Mapping Annual Meeting},
  address      = {Seoul, South Korea (Online)},
  month        = jun,
  year         = {2021},
}
% fix: url lacked a scheme (osf.io/gy8nt); "Cieslak, Matthew, PhD" parsed as
% Last=Cieslak, Jr=Matthew, First=PhD -- stray credential removed so the name
% parses correctly
@presentation{Esteban2021,
  title        = {The Bermuda Triangle of d- and f-MRI sailors - software for susceptibility distortions (SDCFlows)},
  author       = {Esteban, Oscar and Adebimpe, Azeez and Markiewicz, Christopher J and Goncalves, Mathias and Blair, Ross W and Cieslak, Matthew and Naveau, Mikaël and Sitek, Kevin R and Sneve, Markus H and Provins, Céline and MacNicol, Eilidh and Satterthwaite, Theodore D and Poldrack, Russell A},
  type         = {poster},
  organization = {Organization for Human Brain Mapping Annual Meeting},
  address      = {Seoul, South Korea (Online)},
  month        = jun,
  year         = {2021},
  doi          = {10.31219/osf.io/gy8nt},
  url          = {https://osf.io/gy8nt},
}
% NOTE(review): kept byte-identical. "Goncavles, Mathias" looks like a typo
% for "Goncalves" as spelled elsewhere in this file -- verify against the
% published eLife author list before changing. The url duplicates the doi.
@article{Markiewicz2021,
title = {The {OpenNeuro} resource for sharing of neuroscience data},
volume = {10},
copyright = {All rights reserved},
issn = {2050-084X},
url = {https://doi.org/10.7554/eLife.71774},
doi = {10.7554/eLife.71774},
abstract = {The sharing of research data is essential to ensure reproducibility and maximize the impact of public investments in scientific research. Here, we describe OpenNeuro, a BRAIN Initiative data archive that provides the ability to openly share data from a broad range of brain imaging data types following the FAIR principles for data sharing. We highlight the importance of the Brain Imaging Data Structure standard for enabling effective curation, sharing, and reuse of data. The archive presently shares more than 600 datasets including data from more than 20,000 participants, comprising multiple species and measurement modalities and a broad range of phenotypes. The impact of the shared data is evident in a growing number of published reuses, currently totalling more than 150 publications. We conclude by describing plans for future development and integration with other ongoing open science efforts.},
urldate = {2022-01-11},
journal = {eLife},
author = {Markiewicz, Christopher J and Gorgolewski, Krzysztof J and Feingold, Franklin and Blair, Ross and Halchenko, Yaroslav O and Miller, Eric and Hardcastle, Nell and Wexler, Joe and Esteban, Oscar and Goncavles, Mathias and Jwa, Anita and Poldrack, Russell},
editor = {Kahnt, Thorsten and Baker, Chris I and Dosenbach, Nico and Hawrylycz, Michael J and Svoboda, Karel},
month = oct,
year = {2021},
keywords = {data sharing, EEG, MEG, MRI, neuroimaging, open science},
pages = {e71774},
}
% fix: author field ended "... and }" -- a trailing "and" with an empty name,
% which BibTeX flags and renders as an empty author; trailing "and" removed
@presentation{Feingold2021,
  author       = {Feingold, Franklin and Markiewicz, Christopher J},
  title        = {The Brain Imaging Data Structure},
  type         = {oral},
  organization = {International Neuroscience Coordinating Facility},
  address      = {Oldenburg, Germany (Online)},
  month        = aug,
  year         = {2021},
  url          = {https://effigies.github.io/bids-ecosystem/},
}
% fix: url was just the DOI resolver for the doi field -- removed (prefer doi
% when both exist)
@article{DuPre2021,
  author    = {Elizabeth DuPre and Taylor Salo and Zaki Ahmed and Peter A. Bandettini and Katherine L. Bottenhorn and César Caballero-Gaudes and Logan T. Dowdle and Javier Gonzalez-Castillo and Stephan Heunis and Prantik Kundu and Angela R. Laird and Ross Markello and Christopher J. Markiewicz and Stefano Moia and Isla Staden and Joshua B. Teves and Eneko Uruñuela and Maryam Vaziri-Pashkam and Kirstie Whitaker and Daniel A. Handwerker},
  title     = {TE-dependent analysis of multi-echo fMRI with tedana},
  journal   = {Journal of Open Source Software},
  publisher = {The Open Journal},
  volume    = {6},
  number    = {66},
  pages     = {3669},
  year      = {2021},
  doi       = {10.21105/joss.03669},
}
% NOTE(review): consortium paper with a very large author field, kept
% byte-identical. The hard line breaks inside the author value are collapsed
% to spaces by BibTeX, so they are harmless. The url duplicates the doi.
@article{Gau2021,
title = {Brainhack: {Developing} a culture of open, inclusive, community-driven neuroscience},
volume = {109},
copyright = {All rights reserved},
issn = {0896-6273},
shorttitle = {Brainhack},
url = {https://www.sciencedirect.com/science/article/pii/S0896627321002312},
doi = {10.1016/j.neuron.2021.04.001},
abstract = {Brainhack is an innovative meeting format that promotes scientific collaboration and education in an open, inclusive environment. This NeuroView describes the myriad benefits for participants and the research community and how Brainhacks complement conventional formats to augment scientific progress.},
language = {en},
number = {11},
urldate = {2022-01-11},
journal = {Neuron},
author = {Gau, Rémi and Noble, Stephanie and Heuer, Katja and Bottenhorn, Katherine L. and Bilgin, Isil P. and Yang, Yu-Fang and Huntenburg, Julia M. and Bayer, Johanna M. M. and Bethlehem, Richard A. I. and Rhoads, Shawn A. and Vogelbacher, Christoph and Borghesani, Valentina and Levitis, Elizabeth and Wang, Hao-Ting and Van Den Bossche, Sofie and Kobeleva, Xenia and Legarreta, Jon Haitz and Guay, Samuel and Atay, Selim Melvin and Varoquaux, Gael P. and Huijser, Dorien C. and Sandström, Malin S. and Herholz, Peer and Nastase, Samuel A. and Badhwar, AmanPreet and Dumas, Guillaume and Schwab, Simon and Moia, Stefano and Dayan, Michael and Bassil, Yasmine and Brooks, Paula P. and Mancini, Matteo and Shine, James M. and O’Connor, David and Xie, Xihe and Poggiali, Davide and Friedrich, Patrick and Heinsfeld, Anibal S. and Riedl, Lydia and Toro, Roberto and Caballero-Gaudes, César and Eklund, Anders and Garner, Kelly G. and Nolan, Christopher R. and Demeter, Damion V. and Barrios, Fernando A. and Merchant, Junaid S. and McDevitt, Elizabeth A. and Oostenveld, Robert and Craddock, R. Cameron and Rokem, Ariel and Doyle, Andrew and Ghosh, Satrajit S. and Nikolaidis, Aki and Stanley, Olivia W. and Uruñuela, Eneko and Anousheh, Nasim and Arnatkeviciute, Aurina and Auzias, Guillaume and Bachar, Dipankar and Bannier, Elise and Basanisi, Ruggero and Basavaraj, Arshitha and Bedini, Marco and Bellec, Pierre and Benn, R. Austin and Berluti, Kathryn and Bollmann, Steffen and Bollmann, Saskia and Bradley, Claire and Brown, Jesse and Buchweitz, Augusto and Callahan, Patrick and Chan, Micaela Y. and Chandio, Bramsh Q. and Cheng, Theresa and Chopra, Sidhant and Chung, Ai Wern and Close, Thomas G. and Combrisson, Etienne and Cona, Giorgia and Constable, R. Todd and Cury, Claire and Dadi, Kamalaker and Damasceno, Pablo F. and Das, Samir and De Vico Fallani, Fabrizio and DeStasio, Krista and Dickie, Erin W. and Dorfschmidt, Lena and Duff, Eugene P. 
and DuPre, Elizabeth and Dziura, Sarah and Esper, Nathalia B. and Esteban, Oscar and Fadnavis, Shreyas and Flandin, Guillaume and Flannery, Jessica E. and Flournoy, John and Forkel, Stephanie J. and Franco, Alexandre R. and Ganesan, Saampras and Gao, Siyuan and García Alanis, José C. and Garyfallidis, Eleftherios and Glatard, Tristan and Glerean, Enrico and Gonzalez-Castillo, Javier and Gould van Praag, Cassandra D. and Greene, Abigail S. and Gupta, Geetika and Hahn, Catherine Alice and Halchenko, Yaroslav O. and Handwerker, Daniel and Hartmann, Thomas S. and Hayot-Sasson, Valérie and Heunis, Stephan and Hoffstaedter, Felix and Hohmann, Daniela M. and Horien, Corey and Ioanas, Horea-Ioan and Iordan, Alexandru and Jiang, Chao and Joseph, Michael and Kai, Jason and Karakuzu, Agah and Kennedy, David N. and Keshavan, Anisha and Khan, Ali R. and Kiar, Gregory and Klink, P. Christiaan and Koppelmans, Vincent and Koudoro, Serge and Laird, Angela R. and Langs, Georg and Laws, Marissa and Licandro, Roxane and Liew, Sook-Lei and Lipic, Tomislav and Litinas, Krisanne and Lurie, Daniel J. and Lussier, Désirée and Madan, Christopher R. and Mais, Lea-Theresa and Mansour L, Sina and Manzano-Patron, J. P. and Maoutsa, Dimitra and Marcon, Matheus and Margulies, Daniel S. and Marinato, Giorgio and Marinazzo, Daniele and Markiewicz, Christopher J. and Maumet, Camille and Meneguzzi, Felipe and Meunier, David and Milham, Michael P. and Mills, Kathryn L. and Momi, Davide and Moreau, Clara A. and Motala, Aysha and Moxon-Emre, Iska and Nichols, Thomas E. and Nielson, Dylan M. and Nilsonne, Gustav and Novello, Lisa and O’Brien, Caroline and Olafson, Emily and Oliver, Lindsay D. and Onofrey, John A. and Orchard, Edwina R. and Oudyk, Kendra and Park, Patrick J. and Parsapoor, Mahboobeh and Pasquini, Lorenzo and Peltier, Scott and Pernet, Cyril R. and Pienaar, Rudolph and Pinheiro-Chagas, Pedro and Poline, Jean-Baptiste and Qiu, Anqi and Quendera, Tiago and Rice, Laura C. 
and Rocha-Hidalgo, Joscelin and Rutherford, Saige and Scharinger, Mathias and Scheinost, Dustin and Shariq, Deena and Shaw, Thomas B. and Siless, Viviana and Simmonite, Molly and Sirmpilatze, Nikoloz and Spence, Hayli and Sprenger, Julia and Stajduhar, Andrija and Szinte, Martin and Takerkart, Sylvain and Tam, Angela and Tejavibulya, Link and Thiebaut de Schotten, Michel and Thome, Ina and Tomaz da Silva, Laura and Traut, Nicolas and Uddin, Lucina Q. and Vallesi, Antonino and VanMeter, John W. and Vijayakumar, Nandita and di Oleggio Castello, Matteo Visconti and Vohryzek, Jakub and Vukojević, Jakša and Whitaker, Kirstie Jane and Whitmore, Lucy and Wideman, Steve and Witt, Suzanne T. and Xie, Hua and Xu, Ting and Yan, Chao-Gan and Yeh, Fang-Cheng and Yeo, B. T. Thomas and Zuo, Xi-Nian},
month = jun,
year = {2021},
keywords = {open science, best practices, Brainhack, collaboration, community building, hackathon, inclusivity, neuroscience, reproducibility, training},
pages = {1769--1775},
}
@article{Halchenko2021,
  author     = {Halchenko, Yaroslav O. and Meyer, Kyle and Poldrack, Benjamin and Solanky, Debanjum Singh and Wagner, Adina S. and Gors, Jason and MacFarlane, Dave and Pustina, Dorian and Sochat, Vanessa and Ghosh, Satrajit S. and Mönch, Christian and Markiewicz, Christopher J. and Waite, Laura and Shlyakhter, Ilya and de la Vega, Alejandro and Hayashi, Soichi and Häusler, Christian Olaf and Poline, Jean-Baptiste and Kadelka, Tobias and Skytén, Kusti and Jarecka, Dorota and Kennedy, David and Strauss, Ted and Cieslak, Matt and Vavra, Peter and Ioanas, Horea-Ioan and Schneider, Robin and Pflüger, Mika and Haxby, James V. and Eickhoff, Simon B. and Hanke, Michael},
  title      = {{DataLad}: distributed system for joint management of code, data, and their relationship},
  shorttitle = {{DataLad}},
  journal    = {Journal of Open Source Software},
  volume     = {6},
  number     = {63},
  pages      = {3262},
  month      = jul,
  year       = {2021},
  issn       = {2475-9066},
  doi        = {10.21105/joss.03262},
  url        = {https://joss.theoj.org/papers/10.21105/joss.03262},
  urldate    = {2022-01-11},
  language   = {en},
  abstract   = {Halchenko et al., (2021). DataLad: distributed system for joint management of code, data, and their relationship. Journal of Open Source Software, 6(63), 3262, https://doi.org/10.21105/joss.03262},
}
@article{Hanke2021,
  author    = {Hanke, Michael and Pestilli, Franco and Wagner, Adina S. and Markiewicz, Christopher J. and Poline, Jean-Baptiste and Halchenko, Yaroslav O.},
  title     = {In defense of decentralized research data management},
  journal   = {Neuroforum},
  volume    = {27},
  number    = {1},
  pages     = {17--25},
  month     = feb,
  year      = {2021},
  issn      = {1868-856X},
  doi       = {10.1515/nf-2020-0037},
  url       = {http://www.degruyter.com/document/doi/10.1515/nf-2020-0037/html},
  urldate   = {2022-01-11},
  language  = {en},
  copyright = {All rights reserved},
  keywords  = {BrainLife, Canadian Open Neuroscience Platform, DataLad, Interoperability, OpenNeuro},
  abstract  = {Decentralized research data management (dRDM) systems handle digital research objects across participating nodes without critically relying on central services. We present four perspectives in defense of dRDM, illustrating that, in contrast to centralized or federated research data management solutions, a dRDM system based on heterogeneous but interoperable components can offer a sustainable, resilient, inclusive, and adaptive infrastructure for scientific stakeholders: An individual scientist or laboratory, a research institute, a domain data archive or cloud computing platform, and a collaborative multisite consortium. All perspectives share the use of a common, self-contained, portable data structure as an abstraction from current technology and service choices. In conjunction, the four perspectives review how varying requirements of independent scientific stakeholders can be addressed by a scalable, uniform dRDM solution and present a working system as an exemplary implementation.},
}
@article{Goncalves2021,
  author     = {Goncalves, Mathias and Markiewicz, Christopher J. and Moia, Stefano and Ghosh, Satrajit S. and Poldrack, Russell A. and Esteban, Oscar},
  title      = {{NiTransforms}: {A} {Python} tool to read, represent, manipulate, and apply $n$-dimensional spatial transforms},
  shorttitle = {{NiTransforms}},
  journal    = {Journal of Open Source Software},
  volume     = {6},
  number     = {65},
  pages      = {3459},
  month      = sep,
  year       = {2021},
  issn       = {2475-9066},
  doi        = {10.21105/joss.03459},
  url        = {https://joss.theoj.org/papers/10.21105/joss.03459},
  urldate    = {2022-01-11},
  language   = {en},
  copyright  = {All rights reserved},
  abstract   = {Goncalves et al., (2021). NiTransforms: A Python tool to read, represent, manipulate, and apply \$n\$-dimensional spatial transforms. Journal of Open Source Software, 6(65), 3459, https://doi.org/10.21105/joss.03459},
}
@techreport{Ciric2021,
  title       = {{TemplateFlow}: {FAIR}-sharing of multi-scale, multi-species brain models},
  shorttitle  = {{TemplateFlow}},
  institution = {bioRxiv},
  type        = {Preprint},
  url         = {https://www.biorxiv.org/content/10.1101/2021.02.10.430678v3},
  abstract    = {Reference anatomies of the brain and corresponding atlases play a central role in experimental neuroimaging workflows and are the foundation for reporting standardized results. The choice of such references —i.e., templates— and atlases is one relevant source of methodological variability across studies, which has recently been brought to attention as an important challenge to reproducibility in neuroscience. TemplateFlow is a publicly available framework for human and nonhuman brain models. The framework combines an open database with software for access, management, and vetting, allowing scientists to distribute their resources under FAIR —findable, accessible, interoperable, reusable— principles. TemplateFlow supports a multifaceted insight into brains across species, and enables multiverse analyses testing whether results generalize across standard references, scales, and in the long term, species, thereby contributing to increasing the reliability of neuroimaging results.},
  language    = {en},
  urldate     = {2022-01-11},
  author      = {Ciric, Rastko and Thompson, William H. and Lorenz, Romy and Goncalves, Mathias and MacNicol, Eilidh and Markiewicz, Christopher J. and Halchenko, Yaroslav O. and Ghosh, Satrajit S. and Gorgolewski, Krzysztof J. and Poldrack, Russell A. and Esteban, Oscar},
  month       = aug,
  year        = {2021},
  doi         = {10.1101/2021.02.10.430678},
}
@techreport{Bansal2021,
  title       = {High-sensitivity detection of facial features on {MRI} brain scans with a convolutional network},
  institution = {bioRxiv},
  type        = {Preprint},
  url         = {https://www.biorxiv.org/content/10.1101/2021.04.25.441373v1},
  abstract    = {Platforms and institutions that support MRI data sharing need to ensure that identifiable facial features are not present in shared images. Currently, this assessment requires manual effort as no automated tools exist that can efficiently and accurately detect if an image has been ``defaced''. The scarcity of publicly available data with preserved facial features, as well as the meager incentives to create such a cohort privately, have averted the development of face-detection models. Here, we introduce a framework to detect whether an input MRI brain scan has been defaced, with the ultimate goal of streamlining it within the submission protocols of MRI data archiving and sharing platforms. We present a binary (defaced/``nondefaced'') classifier based on a custom convolutional neural network architecture. We train the model on 980 defaced MRI scans from 36 different studies that are publicly available at OpenNeuro.org. To overcome the unavailability of nondefaced examples, we augment the dataset by inpainting synthetic faces into each training image. We show the adequacy of such a data augmentation in a cross-validation evaluation. We demonstrate the performance estimated with cross-validation matches that of an evaluation on a held-out dataset (N=581) preserving real faces, and obtain accuracy/sensitivity/specificity scores of 0.978/0.983/0.972, respectively. Data augmentations are key to boosting the performance of models bounded by limited sample sizes and insufficient diversity. Our model contributes towards developing classifiers with $\sim$100\% sensitivity detecting faces, which is crucial to ensure that no identifiable data are inadvertently made public.},
  language    = {en},
  urldate     = {2022-01-11},
  author      = {Bansal, Shashank and Kori, Avinash and Zulfikar, Wazeer and Wexler, Joseph and Markiewicz, Christopher J. and Feingold, Franklin F. and Poldrack, Russell A. and Esteban, Oscar},
  month       = apr,
  year        = {2021},
  doi         = {10.1101/2021.04.25.441373},
}
@techreport{Norgaard2021,
  title       = {{PET}-{BIDS}, an extension to the brain imaging data structure for positron emission tomography},
  institution = {bioRxiv},
  type        = {Preprint},
  url         = {https://www.biorxiv.org/content/10.1101/2021.06.16.448390v1},
  abstract    = {The Brain Imaging Data Structure (BIDS) is a standard for organizing and describing neuroimaging datasets. It serves not only to facilitate the process of data sharing and aggregation, but also to simplify the application and development of new methods and software for working with neuroimaging data. Here, we present an extension of BIDS to include positron emission tomography (PET) data (PET-BIDS). We describe the PET-BIDS standard in detail and share several open-access datasets curated following PET-BIDS. Additionally, we highlight several tools which are already available for converting, validating and analyzing PET-BIDS datasets.},
  language    = {en},
  urldate     = {2022-01-11},
  author      = {Norgaard, Martin and Matheson, Granville J. and Hansen, Hanne D. and Thomas, Adam and Searle, Graham and Rizzo, Gaia and Veronese, Mattia and Giacomel, Alessio and Yaqub, Maqsood and Tonietto, Matteo and Funck, Thomas and Gillman, Ashley and Boniface, Hugo and Routier, Alexandre and Dalenberg, Jelle R. and Betthauser, Tobey and Feingold, Franklin and Markiewicz, Christopher J. and Gorgolewski, Krzysztof J. and Blair, Ross W. and Appelhoff, Stefan and Gau, Remi and Salo, Taylor and Niso, Guiomar and Pernet, Cyril and Phillips, Christophe and Oostenveld, Robert and Gallezot, Jean-Dominique and Carson, Richard E. and Knudsen, Gitte M. and Innis, Robert B. and Ganz, Melanie},
  month       = jun,
  year        = {2021},
  doi         = {10.1101/2021.06.16.448390},
}
@techreport{Karakuzu2021,
  title       = {{qMRI}-{BIDS}: an extension to the brain imaging data structure for quantitative magnetic resonance imaging data},
  shorttitle  = {{qMRI}-{BIDS}},
  institution = {medRxiv},
  type        = {Preprint},
  url         = {https://www.medrxiv.org/content/10.1101/2021.10.22.21265382v3},
  abstract    = {The Brain Imaging Data Structure (BIDS) established community consensus on the organization of data and metadata for several neuroimaging modalities. Traditionally, BIDS had a strong focus on functional magnetic resonance imaging (MRI) datasets and lacked guidance on how to store multimodal structural MRI datasets. Here, we present and describe the BIDS Extension Proposal 001 (BEP001), which adds a range of quantitative MRI (qMRI) applications to the BIDS application sphere. In general, the aim of qMRI is to characterize brain microstructure by quantifying the physical MR parameters of the tissue via computational, biophysical models. By proposing this new standard, we envision standardization of qMRI which makes multicenter dissemination of interoperable data possible. As a result, BIDS can act as a catalyst of convergence between qMRI methods development and application-driven neuroimaging studies that can help develop quantitative biomarkers for neural tissue characterization. Finally, our BIDS extension offers a common ground for developers to exchange novel imaging data and tools, reducing the practical barriers to standardization that is currently lacking in the field of neuroimaging.},
  language    = {en},
  urldate     = {2022-01-11},
  author      = {Karakuzu, Agah and Appelhoff, Stefan and Auer, Tibor and Boudreau, Mathieu and Feingold, Franklin and Khan, Ali R. and Lazari, Alberto and Markiewicz, Christopher J. and Mulder, Martijn J. and Phillips, Christophe and Salo, Taylor and Stikov, Nikola and Whitaker, Kirstie and de Hollander, Gilles},
  month       = oct,
  year        = {2021},
  doi         = {10.1101/2021.10.22.21265382},
}
@inproceedings{Esteban2020b,
  title     = {Software {Tool} to {Read}, {Represent}, {Manipulate}, and {Apply} {N}-{Dimensional} {Spatial} {Transforms}},
  copyright = {All rights reserved},
  doi       = {10.1109/ISBI45749.2020.9098466},
  abstract  = {Spatial transforms formalize mappings between coordinates of objects in biomedical images. Transforms typically are the outcome of image registration methodologies, which estimate the alignment between two images. Image registration is a prominent task present in nearly all standard image processing and analysis pipelines. The proliferation of software implementations of image registration methodologies has resulted in a spread of data structures and file formats used to preserve and communicate transforms. This segregation of formats hinders the compatibility between tools and endangers the reproducibility of results. We propose a software tool capable of converting between formats and resampling images to apply transforms generated by the most popular neuroimaging packages and libraries (AFNI, FSL, FreeSurfer, ITK, and SPM). The proposed software is subject to continuous integration tests to check the compatibility with each supported tool after every change to the code base (https://github.com/poldracklab/nitransforms). Compatibility between software tools and imaging formats is a necessary bridge to ensure the reproducibility of results and enable the optimization and evaluation of current image processing and analysis workflows.},
  booktitle = {2020 {IEEE} 17th {International} {Symposium} on {Biomedical} {Imaging} ({ISBI})},
  author    = {Esteban, Oscar and Goncalves, Mathias and Markiewicz, Christopher J. and Ghosh, Satrajit S. and Poldrack, Russell A.},
  month     = apr,
  year      = {2020},
  keywords  = {BIDS, image registration, Image registration, Pipelines, software infrastructure, Software tools, spatial transforms, Standards, Tools, Transforms},
  pages     = {709--712},
}
@article{Moreau2020,
  title      = {The genetics-{BIDS} extension: {Easing} the search for genetic data associated with human brain imaging},
  volume     = {9},
  copyright  = {All rights reserved},
  issn       = {2047-217X},
  shorttitle = {The genetics-{BIDS} extension},
  url        = {https://doi.org/10.1093/gigascience/giaa104},
  doi        = {10.1093/gigascience/giaa104},
  abstract   = {Metadata are what makes databases searchable. Without them, researchers would have difficulty finding data with features they are interested in. Brain imaging genetics is at the intersection of two disciplines, each with dedicated dictionaries and ontologies facilitating data search and analysis. Here, we present the genetics Brain Imaging Data Structure extension, consisting of metadata files for human brain imaging data to which they are linked, and describe succinctly the genomic and transcriptomic data associated with them, which may be in different databases. This extension will facilitate identifying micro-scale molecular features that are linked to macro-scale imaging repositories, facilitating data aggregation across studies.},
  number     = {10},
  urldate    = {2022-01-11},
  journal    = {GigaScience},
  author     = {Moreau, Clara A. and Jean-Louis, Martineau and Blair, Ross and Markiewicz, Christopher J. and Turner, Jessica A. and Calhoun, Vince D. and Nichols, Thomas E. and Pernet, Cyril R.},
  month      = oct,
  year       = {2020},
  pages      = {giaa104},
}
@misc{PyMVPA2_4_1,
  author = {Yaroslav Halchenko and
            Michael Hanke and
            Nikolaas N. Oosterhof and
            Emanuele Olivetti and
            Per B. Sederberg and
            Swaroop Guntupalli and
            Tiziano Zito and
            Valentin Haenel and
            Sven Buchholz and
            Richard Dinga and
            Arman Eshaghi and
            David Armstrong and
            Adam Riggall and
            Christoph Gohlke and
            Chris Markiewicz and
            Michael Notter and
            Matthias Ekman and
            Cameron Chen and
            Kelsey Wheeler and
            Satrajit Ghosh and
            Reka Daniel-Weiner and
            Matteo Visconti di Oleggio Castello and
            Geethapriya Raghavan and
            Andrew Connolly and
            Feilong Ma},
  title  = {PyMVPA: 2.4.1},
  month  = nov,
  year   = {2015},
  doi    = {10.5281/zenodo.33988},
  url    = {https://doi.org/10.5281/zenodo.33988},
}
@misc{PySurfer0_9,
  author = {Michael Waskom and
            Eric Larson and
            Christian Brodbeck and
            Alexandre Gramfort and
            Scott Burns and
            Martin Luessi and
            Christoph T. Weidemann and
            Sebastian Bitzer and
            Chris Markiewicz and
            Roan LaPlante and
            Denis A. Engemann and
            Yaroslav Halchenko and
            Satrajit Ghosh and
            Natalie Klein and
            Diego Angulo and
            Marijn van Vliet and
            Gio Piantoni and
            Matthew Brett and
            Laura Gwilliams},
  title  = {PySurfer: 0.9.0},
  month  = oct,
  year   = {2018},
  doi    = {10.5281/zenodo.1443483},
  url    = {https://doi.org/10.5281/zenodo.1443483},
}
@article{Esteban2019a,
  author    = {Oscar Esteban and Mathias Goncalves and Christopher
               Johnson Markiewicz and Satrajit S Ghosh and Russell
               Poldrack},
  title     = {Software tool to read, represent, manipulate, and apply
               n-dimensional spatial transforms},
  publisher = {Center for Open Science},
  year      = {2019},
  month     = oct,
  doi       = {10.31219/osf.io/8aq7b},
  url       = {https://doi.org/10.31219/osf.io/8aq7b},
}
@article{Provins2022,
  author    = {C{\'{e}}line Provins and Christopher Johnson Markiewicz
               and Rastko Ciric and Mathias Goncalves and C{\'{e}}sar
               Caballero-Gaudes and Russell Poldrack and Patric Hagmann
               and Oscar Esteban},
  title     = {Quality control and nuisance regression of {fMRI}, looking
               out where signal should not be found},
  publisher = {Center for Open Science},
  year      = {2022},
  month     = jan,
  doi       = {10.31219/osf.io/hz52v},
  url       = {https://doi.org/10.31219/osf.io/hz52v},
}
@article{Karakuzu2022,
  abstract = {The Brain Imaging Data Structure (BIDS) established community consensus on the organization of data and metadata for several neuroimaging modalities. Traditionally, BIDS had a strong focus on functional magnetic resonance imaging (MRI) datasets and lacked guidance on how to store multimodal structural MRI datasets. Here, we present and describe the BIDS Extension Proposal 001 (BEP001), which adds a range of quantitative MRI (qMRI) applications to the BIDS. In general, the aim of qMRI is to characterize brain microstructure by quantifying the physical MR parameters of the tissue via computational, biophysical models. By proposing this new standard, we envision standardization of qMRI through multicenter dissemination of interoperable datasets. This way, BIDS can act as a catalyst of convergence between qMRI methods development and application-driven neuroimaging studies that can help develop quantitative biomarkers for neural tissue characterization. In conclusion, this BIDS extension offers a common ground for developers to exchange novel imaging data and tools, reducing the entrance barrier for qMRI in the field of neuroimaging.},
  author   = {Karakuzu, Agah and Appelhoff, Stefan and Auer, Tibor and Boudreau, Mathieu and Feingold, Franklin and Khan, Ali R. and Lazari, Alberto and Markiewicz, Christopher J. and Mulder, Martijn J. and Phillips, Christophe and Salo, Taylor and Stikov, Nikola and Whitaker, Kirstie and de Hollander, Gilles},
  doi      = {10.1038/s41597-022-01571-4},
  issn     = {2052-4463},
  issue    = {1},
  journal  = {Scientific Data},
  title    = {{qMRI}-{BIDS}: An extension to the brain imaging data structure for quantitative magnetic resonance imaging data},
  volume   = {9},
  year     = {2022},
}
@article{Ciric2022,
  abstract = {Reference anatomies of the brain (‘templates’) and corresponding atlases are the foundation for reporting standardized neuroimaging results. Currently, there is no registry of templates and atlases; therefore, the redistribution of these resources occurs either bundled within existing software or in ad hoc ways such as downloads from institutional sites and general-purpose data repositories. We introduce TemplateFlow as a publicly available framework for human and non-human brain models. The framework combines an open database with software for access, management, and vetting, allowing scientists to share their resources under FAIR—findable, accessible, interoperable, and reusable—principles. TemplateFlow enables multifaceted insights into brains across species, and supports multiverse analyses testing whether results generalize across standard references, scales, and in the long term, species.},
  author   = {Rastko Ciric and William H. Thompson and Romy Lorenz and Mathias Goncalves and Eilidh E. MacNicol and Christopher J. Markiewicz and Yaroslav O. Halchenko and Satrajit S. Ghosh and Krzysztof J. Gorgolewski and Russell A. Poldrack and Oscar Esteban},
  doi      = {10.1038/s41592-022-01681-2},
  issn     = {1548-7091},
  issue    = {12},
  journal  = {Nature Methods},
  month    = dec,
  pages    = {1568--1571},
  title    = {{TemplateFlow}: {FAIR}-sharing of multi-scale, multi-species brain models},
  volume   = {19},
  year     = {2022},
}
@article{Norgaard2022,
  author  = {Martin Norgaard and Granville J. Matheson and Hanne D. Hansen and Adam Thomas and Graham Searle and Gaia Rizzo and Mattia Veronese and Alessio Giacomel and Maqsood Yaqub and Matteo Tonietto and Thomas Funck and Ashley Gillman and Hugo Boniface and Alexandre Routier and Jelle R. Dalenberg and Tobey Betthauser and Franklin Feingold and Christopher J. Markiewicz and Krzysztof J. Gorgolewski and Ross W. Blair and Stefan Appelhoff and Remi Gau and Taylor Salo and Guiomar Niso and Cyril Pernet and Christophe Phillips and Robert Oostenveld and Jean-Dominique Gallezot and Richard E. Carson and Gitte M. Knudsen and Robert B. Innis and Melanie Ganz},
  doi     = {10.1038/s41597-022-01164-1},
  issn    = {2052-4463},
  issue   = {1},
  journal = {Scientific Data},
  month   = mar,
  pages   = {65},
  title   = {{PET}-{BIDS}, an extension to the brain imaging data structure for positron emission tomography},
  volume  = {9},
  year    = {2022},
}
@article{Clement2022,
  abstract = {Arterial spin labeling (ASL) is a non-invasive MRI technique that allows for quantitative measurement of cerebral perfusion. Incomplete or inaccurate reporting of acquisition parameters complicates quantification, analysis, and sharing of ASL data, particularly for studies across multiple sites, platforms, and ASL methods. There is a strong need for standardization of ASL data storage, including acquisition metadata. Recently, ASL-BIDS, the BIDS extension for ASL, was developed and released in BIDS 1.5.0. This manuscript provides an overview of the development and design choices of this first ASL-BIDS extension, which is mainly aimed at clinical ASL applications. Discussed are the structure of the ASL data, focussing on storage order of the ASL time series and implementation of calibration approaches, unit scaling, ASL-related BIDS fields, and storage of the labeling plane information. Additionally, an overview of ASL-BIDS compatible conversion and ASL analysis software and ASL example datasets in BIDS format is provided. We anticipate that large-scale adoption of ASL-BIDS will improve the reproducibility of ASL research.},
  author   = {Patricia Clement and Marco Castellaro and Thomas W. Okell and David L. Thomas and Pieter Vandemaele and Sara Elgayar and Aaron Oliver-Taylor and Thomas Kirk and Joseph G. Woods and Sjoerd B. Vos and Joost P. A. Kuijer and Eric Achten and Matthias J. P. van Osch and Stefan Appelhoff and Ross Blair and Franklin Feingold and Rémi Gau and Christopher J. Markiewicz and Taylor Salo and John A. Detre and Hanzhang Lu and David C. Alsop and Michael A. Chappell and Luis Hernandez-Garcia and Jan Petr and Henk J. M. M. Mutsaerts},
  doi      = {10.1038/s41597-022-01615-9},
  issn     = {2052-4463},
  issue    = {1},
  journal  = {Scientific Data},
  month    = sep,
  pages    = {543},
  title    = {{ASL}-{BIDS}, the brain imaging data structure extension for arterial spin labeling},
  volume   = {9},
  year     = {2022},
}
@article{Niso2022,
  author  = {Guiomar Niso and Rotem Botvinik-Nezer and Stefan Appelhoff and Alejandro De La Vega and Oscar Esteban and Joset A. Etzel and Karolina Finc and Melanie Ganz and Rémi Gau and Yaroslav O. Halchenko and Peer Herholz and Agah Karakuzu and David B. Keator and Christopher J. Markiewicz and Camille Maumet and Cyril R. Pernet and Franco Pestilli and Nazek Queder and Tina Schmitt and Weronika Sójka and Adina S. Wagner and Kirstie J. Whitaker and Jochem W. Rieger},
  title   = {Open and reproducible neuroimaging: From study inception to publication},
  journal = {NeuroImage},
  volume  = {263},
  pages   = {119623},
  month   = nov,
  year    = {2022},
  issn    = {10538119},
  doi     = {10.1016/j.neuroimage.2022.119623},
}
@article{Bourget2022,
  abstract = {The Brain Imaging Data Structure (BIDS) is a specification for organizing, sharing, and archiving neuroimaging data and metadata in a reusable way. First developed for magnetic resonance imaging (MRI) datasets, the community-led specification evolved rapidly to include other modalities such as magnetoencephalography, positron emission tomography, and quantitative MRI (qMRI). In this work, we present an extension to BIDS for microscopy imaging data, along with example datasets. Microscopy-BIDS supports common imaging methods, including 2D/3D, \textit{ex}/\textit{in vivo}, micro-CT, and optical and electron microscopy. Microscopy-BIDS also includes comprehensible metadata definitions for hardware, image acquisition, and sample properties. This extension will facilitate future harmonization efforts in the context of multi-modal, multi-scale imaging such as the characterization of tissue microstructure with qMRI.},
  author   = {Marie-Hélène Bourget and Lee Kamentsky and Satrajit S. Ghosh and Giacomo Mazzamuto and Alberto Lazari and Christopher J. Markiewicz and Robert Oostenveld and Guiomar Niso and Yaroslav O. Halchenko and Ilona Lipp and Sylvain Takerkart and Paule-Joanne Toussaint and Ali R. Khan and Gustav Nilsonne and Filippo Maria Castelli and Julien Cohen-Adad},
  doi      = {10.3389/fnins.2022.871228},
  issn     = {1662-453X},
  journal  = {Frontiers in Neuroscience},
  month    = apr,
  title    = {{Microscopy-BIDS}: An Extension to the Brain Imaging Data Structure for Microscopy Data},
  volume   = {16},
  year     = {2022},
}
@article{Vega2022,
  abstract = {Functional magnetic resonance imaging (fMRI) has revolutionized cognitive neuroscience, but methodological barriers limit the generalizability of findings from the lab to the real world. Here, we present Neuroscout, an end-to-end platform for analysis of naturalistic fMRI data designed to facilitate the adoption of robust and generalizable research practices. Neuroscout leverages state-of-the-art machine learning models to automatically annotate stimuli from dozens of fMRI studies using naturalistic stimuli—such as movies and narratives—allowing researchers to easily test neuroscientific hypotheses across multiple ecologically-valid datasets. In addition, Neuroscout builds on a robust ecosystem of open tools and standards to provide an easy-to-use analysis builder and a fully automated execution engine that reduce the burden of reproducible research. Through a series of meta-analytic case studies, we validate the automatic feature extraction approach and demonstrate its potential to support more robust fMRI research. Owing to its ease of use and a high degree of automation, Neuroscout makes it possible to overcome modeling challenges commonly arising in naturalistic analysis and to easily scale analyses within and across datasets, democratizing generalizable fMRI research.},
  author   = {Alejandro de la Vega and Roberta Rocca and Ross W Blair and Christopher J Markiewicz and Jeff Mentch and James D Kent and Peer Herholz and Satrajit S Ghosh and Russell A Poldrack and Tal Yarkoni},
  doi      = {10.7554/eLife.79277},
  issn     = {2050-084X},
  journal  = {eLife},
  month    = aug,
  title    = {{Neuroscout}, a unified platform for generalizable and reproducible {fMRI} research},
  volume   = {11},
  year     = {2022},
}
@misc{Markiewicz2022Brainhack,
  author       = {Christopher J. Markiewicz},
  title        = {{OpenNeuro}: An open repository for neuroimaging data sharing},
  year         = {2022},
  month        = jun,
  howpublished = {Oral presentation at Brainhack, Glasgow, UK},
}