% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@ARTICLE{EstradaLeon:162724,
author = {Estrada Leon, Edgar Santiago and Lu, Ran and Diers, Kersten
and Zeng, Weiyi and Ehses, Philipp and Stöcker, Tony and
Breteler, Monique and Reuter, Martin},
title = {{A}utomated olfactory bulb segmentation on high
resolutional {T}2-weighted {MRI}.},
journal = {NeuroImage},
volume = {242},
issn = {1053-8119},
address = {Orlando, Fla.},
publisher = {Academic Press},
reportid = {DZNE-2021-01381},
pages = {118464},
year = {2021},
note = {CC BY},
abstract = {The neuroimage analysis community has neglected the
automated segmentation of the olfactory bulb (OB) despite
its crucial role in olfactory function. The lack of an
automatic processing method for the OB can be explained by
its challenging properties (small size, location, and poor
visibility on traditional MRI scans). Nonetheless, recent
advances in MRI acquisition techniques and resolution have
allowed raters to generate more reliable manual annotations.
Furthermore, the high accuracy of deep learning methods for
solving semantic segmentation problems provides us with an
option to reliably assess even small structures. In this
work, we introduce a novel, fast, and fully automated deep
learning pipeline to accurately segment OB tissue on
sub-millimeter T2-weighted (T2w) whole-brain MR images. To
this end, we designed a three-stage pipeline: (1)
Localization of a region containing both OBs using
FastSurferCNN, (2) Segmentation of OB tissue within the
localized region through four independent AttFastSurferCNN
models, a novel deep learning architecture with a
self-attention mechanism to improve modeling of contextual
information, and (3) Ensembling of the predicted label maps.
For this work,
both OBs were manually annotated in a total of 620 T2w
images for training (n=357) and testing. The OB pipeline
exhibits high performance in terms of boundary delineation,
OB localization, and volume estimation across a wide range
of ages in 203 participants of the Rhineland Study (Dice
Score (Dice): 0.852, Volume Similarity (VS): 0.910, and
Average Hausdorff Distance (AVD): 0.215 mm). Moreover, it
generalizes to scans of an independent dataset never
encountered during training, the Human Connectome
Project (HCP), with different acquisition parameters and
demographics, evaluated in 30 cases at the native 0.7 mm
HCP resolution (Dice: 0.738, VS: 0.790, and AVD: 0.340 mm),
and the default 0.8 mm pipeline resolution (Dice: 0.782,
VS: 0.858, and AVD: 0.268 mm). We extensively validated our
pipeline not only with respect to segmentation accuracy but
also to known OB volume effects, where it can sensitively
replicate age effects (β=-0.232, p<.01). Furthermore, our
method can analyze a 3D volume in less than a minute on a GPU
in an end-to-end fashion, providing a validated, efficient,
and scalable solution for automatically assessing OB
volumes.},
keywords = {Adult / Aged / Deep Learning / Female / Humans / Image
Processing, Computer-Assisted: methods / Magnetic Resonance
Imaging: methods / Male / Middle Aged / Neural Networks,
Computer / Olfactory Bulb: diagnostic imaging /
Convolutional neural networks (Other) / Deep learning
(Other) / Olfactory bulb (Other) / Semantic segmentation
(Other)},
cin = {AG Breteler / AG Reuter / AG Stöcker},
ddc = {610},
cid = {I:(DE-2719)1012001 / I:(DE-2719)1040310 /
I:(DE-2719)1013026},
pnm = {354 - Disease Prevention and Healthy Aging (POF4-354)},
pid = {G:(DE-HGF)POF4-354},
experiment = {EXP:(DE-2719)Rhineland Study-20190321},
typ = {PUB:(DE-HGF)16},
pubmed = {pmid:34389442},
pmc = {pmc:PMC8473894},
doi = {10.1016/j.neuroimage.2021.118464},
url = {https://pub.dzne.de/record/162724},
}
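
% The abstract above quotes three evaluation metrics: Dice Score, Volume
% Similarity (VS), and Average Hausdorff Distance (AVD). The sketch below is a
% minimal, illustrative way to compute them for a pair of binary olfactory bulb
% masks with numpy/scipy; it is not the paper's implementation, and the
% function names, the default 0.8 mm voxel size, and the toy usage example are
% assumptions made for illustration only. (BibTeX ignores text outside entries,
% so this block is treated as a comment.)

import numpy as np
from scipy.ndimage import distance_transform_edt

def dice_score(pred, gt):
    # Dice = 2*|A and B| / (|A| + |B|); 1.0 means perfect overlap.
    pred, gt = pred.astype(bool), gt.astype(bool)
    denom = pred.sum() + gt.sum()
    return 1.0 if denom == 0 else 2.0 * np.logical_and(pred, gt).sum() / denom

def volume_similarity(pred, gt):
    # VS = 1 - ||A| - |B|| / (|A| + |B|); compares volumes, not locations.
    a, b = int(pred.astype(bool).sum()), int(gt.astype(bool).sum())
    return 1.0 if a + b == 0 else 1.0 - abs(a - b) / (a + b)

def average_hausdorff(pred, gt, voxel_size=(0.8, 0.8, 0.8)):
    # Symmetric average Hausdorff distance in mm via Euclidean distance
    # transforms; assumes both masks contain at least one foreground voxel.
    pred, gt = pred.astype(bool), gt.astype(bool)
    d_to_gt = distance_transform_edt(~gt, sampling=voxel_size)      # mm to nearest gt voxel
    d_to_pred = distance_transform_edt(~pred, sampling=voxel_size)  # mm to nearest pred voxel
    return 0.5 * (d_to_gt[pred].mean() + d_to_pred[gt].mean())

# Toy usage (purely illustrative):
#   pred = np.zeros((16, 16, 16), dtype=bool); pred[4:8, 4:8, 4:8] = True
#   gt = np.zeros((16, 16, 16), dtype=bool); gt[5:9, 4:8, 4:8] = True
#   print(dice_score(pred, gt), volume_similarity(pred, gt), average_hausdorff(pred, gt))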