@inproceedings{d045bca278544949b6a473c0ac2541fb,
  title     = {{TapGazer}: Text Entry with Finger Tapping and Gaze-directed Word Selection},
  abstract  = {While using VR, efficient text entry is a challenge: users cannot easily locate standard physical keyboards, and keys are often out of reach, e.g. when standing. We present TapGazer, a text entry system where users type by tapping their fingers in place. Users can tap anywhere as long as the identity of each tapping finger can be detected with sensors. Ambiguity between different possible input words is resolved by selecting target words with gaze. If gaze tracking is unavailable, ambiguity is resolved by selecting target words with additional taps. We evaluated TapGazer for seated and standing VR: seated novice users using touchpads as tap surfaces reached 44.81 words per minute (WPM), 79.17\% of their QWERTY typing speed. Standing novice users tapped on their thighs with touch-sensitive gloves, reaching 45.26 WPM (71.91\%). We analyze TapGazer with a theoretical performance model and discuss its potential for text input in future AR scenarios.},
  keywords  = {Eye Tracking, Input Techniques, Text Entry, Typing, Virtual Reality},
  author    = {He, Zhenyi and Lutteroth, Christof and Perlin, Ken},
  note      = {Publisher Copyright: {\textcopyright} 2022 ACM.; 2022 CHI Conference on Human Factors in Computing Systems, CHI 2022 ; Conference date: 30-04-2022 Through 05-05-2022},
  year      = {2022},
  month     = apr,
  day       = {29},
  doi       = {10.1145/3491102.3501838},
  language  = {English (US)},
  series    = {Conference on Human Factors in Computing Systems - Proceedings},
  publisher = {Association for Computing Machinery},
  booktitle = {{CHI} 2022 - Proceedings of the 2022 {CHI} Conference on Human Factors in Computing Systems},
}