% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
% Cleaned auto-export (pub.dzne.de / CrossRef import):
%  - pages: single hyphen -> en-dash range (--)
%  - month: quoted {Jan} -> bare macro jan (style/language aware)
%  - title: single-letter brace protection -> whole-word; only the acronym
%    {VR} needs protecting against sentence-casing styles
%  - booktitle: removed publisher/year/fused-ISBN/math-mode-DOI junk; the two
%    ISBNs moved to a proper isbn field, the DOI was already in the doi field
%  - issn: lifted from the importer's note ("Pac Symp Biocomput = 2335-6928")
%  - redundant comment field (byte-duplicate of the garbled booktitle) dropped
@inproceedings{Schmitz:285366,
  author       = {Schmitz, Lio and Plack, Markus and Koyak, Berkan and Ullah,
                  Ehsan and Aziz, Ahmad and Klein, Reinhard and Lähner, Zorah
                  and Dröge, Hannah},
  title        = {Towards Automated Analysis of Gaze Behavior from Consumer
                  {VR} Devices for Neurological Diagnosis},
  booktitle    = {Biocomputing 2026: Proceedings of the Pacific Symposium},
  publisher    = {WORLD SCIENTIFIC},
  reportid     = {DZNE-2026-00230},
  pages        = {219--235},
  year         = {2025},
  month        = jan,
  date         = {2026-01-03},
  isbn         = {978-981-98-2474-8; 978-981-98-2475-5},
  issn         = {2335-6928},
  note         = {Missing Journal: Pac Symp Biocomput = 2335-6928 (import
                  from CrossRef Conference, PubMed, Journals: pub.dzne.de)},
  abstract     = {Recent studies have demonstrated that eye tracking is a
                  valuable tool in the detection, classification and staging
                  of neurodegenerative diseases such as Parkinson's Disease
                  (PD). However, traditional methods for capturing gaze data
                  often rely on expensive and non-engaging clinical equipment
                  such as video-oculography, limiting their accessibility and
                  scalability. In this work, we investigate the feasibility of
                  using eye tracking data collected via consumer-grade virtual
                  reality (VR) headsets to support neurological diagnostics in
                  a more accessible and user-friendly manner. This approach
                  enables large-scale, low-cost, and remote assessments, which
                  are particularly valuable in early detection and monitoring
                  of neurodegenerative conditions. We show that relevant
                  oculomotor features extracted from VR-based eye tracking can
                  be used for predictive assessment. Despite the inherent
                  noise and lower precision of consumer devices, careful
                  preprocessing and robust feature engineering, including deep
                  learning embeddings, mitigate these limitations. Our results
                  demonstrate that both handcrafted and learned features from
                  gaze behavior enable promising levels of classification
                  performance. This research represents an important step
                  towards scalable, automated, and accessible diagnostic tools
                  for neurodegenerative diseases using ubiquitous VR
                  technology.},
  organization = {Pacific Symposium on Biocomputing 2026, Kohala Coast
                  (Hawaii), 3 Jan 2026 -- 7 Jan 2026},
  cin          = {AG Aziz},
  cid          = {I:(DE-2719)5000071},
  pnm          = {354 - Disease Prevention and Healthy Aging (POF4-354)},
  pid          = {G:(DE-HGF)POF4-354},
  typ          = {PUB:(DE-HGF)8 / PUB:(DE-HGF)7},
  doi          = {10.1142/9789819824755_0016},
  url          = {https://pub.dzne.de/record/285366},
}