% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@ARTICLE{Kulvicius:275937,
      author       = {Kulvicius, Tomas and Zhang, Dajie and Poustka, Luise and
                      Bölte, Sven and Jahn, Lennart and Flügge, Sarah and Kraft,
                      Marc and Zweckstetter, Markus and Nielsen-Saines, Karin and
                      Wörgötter, Florentin and Marschik, Peter B},
      title        = {{D}eep learning empowered sensor fusion boosts infant
                      movement classification},
      journal      = {Communications Medicine},
      volume       = {5},
      number       = {1},
      issn         = {2730-664X},
      address      = {[London]},
      publisher    = {Springer Nature},
      reportid     = {DZNE-2025-00159},
      pages        = {16},
      year         = {2025},
      abstract     = {To assess the integrity of the developing nervous system,
                      the Prechtl general movement assessment (GMA) is recognized
                      for its clinical value in diagnosing neurological
                      impairments in early infancy. GMA has been increasingly
                      augmented through machine learning approaches intended to
                      scale up its application, circumvent the costs of training
                      human assessors, and further standardize the classification
                      of spontaneous motor patterns. Available deep learning
                      tools, all of which are based on single sensor modalities,
                      are, however, still considerably inferior to well-trained
                      human assessors. These approaches are hardly comparable, as
                      all models are designed, trained, and evaluated on
                      proprietary/silo data sets. With this study we propose a
                      sensor fusion approach for assessing fidgety movements
                      (FMs). FMs were recorded from 51 typically developing
                      participants. We compared three different sensor modalities
                      (pressure, inertial, and visual sensors). Various
                      combinations and two sensor fusion approaches (late and
                      early fusion) for infant movement classification were
                      tested to evaluate whether a multi-sensor system
                      outperforms single-modality assessments. Convolutional
                      neural network (CNN) architectures were used to classify
                      movement patterns. The performance of the three-sensor
                      fusion (classification accuracy of $94.5\%$) is
                      significantly higher than that of any single modality
                      evaluated. We show that the sensor fusion approach is a
                      promising avenue for the automated classification of
                      infant motor patterns. The development of a robust sensor
                      fusion system may significantly enhance AI-based early
                      recognition of neurofunctions, ultimately facilitating
                      automated early detection of neurodevelopmental
                      conditions.},
      cin          = {AG Zweckstetter},
      cid          = {I:(DE-2719)1410001},
      pnm          = {352 - Disease Mechanisms (POF4-352)},
      pid          = {G:(DE-HGF)POF4-352},
      typ          = {PUB:(DE-HGF)16},
      pubmed       = {pmid:39809877},
      pmc          = {pmc:PMC11733215},
      doi          = {10.1038/s43856-024-00701-w},
      url          = {https://pub.dzne.de/record/275937},
}
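
% For orientation only, and not part of the bibliographic record: the abstract
% above compares early and late sensor fusion with CNN classifiers. The sketch
% below (plain Python/PyTorch; BibTeX ignores text outside @entries) is a
% minimal illustration of the two fusion schemes under assumed, hypothetical
% channel counts and layer sizes. It is not the authors' implementation.

import torch
import torch.nn as nn


class ModalityEncoder(nn.Module):
    """Small 1D CNN mapping one sensor stream (batch, channels, time) to a feature vector."""

    def __init__(self, in_channels: int, feat_dim: int = 64):
        super().__init__()
        self.net = nn.Sequential(
            nn.Conv1d(in_channels, 32, kernel_size=5, padding=2),
            nn.ReLU(),
            nn.Conv1d(32, feat_dim, kernel_size=5, padding=2),
            nn.ReLU(),
            nn.AdaptiveAvgPool1d(1),  # pool over time -> (batch, feat_dim, 1)
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.net(x).squeeze(-1)  # (batch, feat_dim)


class LateFusionClassifier(nn.Module):
    """Late fusion: one CNN classifier per modality; class logits are averaged."""

    def __init__(self, channels=(3, 6, 34), num_classes: int = 2):
        super().__init__()
        self.encoders = nn.ModuleList(ModalityEncoder(c) for c in channels)
        self.heads = nn.ModuleList(nn.Linear(64, num_classes) for _ in channels)

    def forward(self, streams):
        logits = [head(enc(x)) for enc, head, x in zip(self.encoders, self.heads, streams)]
        return torch.stack(logits).mean(dim=0)


class EarlyFusionClassifier(nn.Module):
    """Early fusion: concatenate all sensor channels and feed one shared CNN."""

    def __init__(self, channels=(3, 6, 34), num_classes: int = 2):
        super().__init__()
        self.encoder = ModalityEncoder(sum(channels))
        self.head = nn.Linear(64, num_classes)

    def forward(self, streams):
        return self.head(self.encoder(torch.cat(streams, dim=1)))


if __name__ == "__main__":
    # Dummy batch: three streams (e.g. pressure, inertial, pose) over 500 time steps;
    # the channel counts (3, 6, 34) are placeholders, not values from the paper.
    streams = [torch.randn(8, c, 500) for c in (3, 6, 34)]
    print(LateFusionClassifier()(streams).shape)   # torch.Size([8, 2])
    print(EarlyFusionClassifier()(streams).shape)  # torch.Size([8, 2])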