@inproceedings{24d65ccf203a47249b571abf2f02c88a,
title = "Head rotation data extraction from virtual reality gameplay using non-individualized HRTFs",
abstract = "A game was created to analyze the subject{\textquoteright}s head rotation to give accurate data about the process of localizing a sound in a 360-degree sphere in a VR gameplay. In this game, the subjects are asked to locate a series of sounds that are randomly placed in a sphere around their heads using generalized HRTFs. The only instruction given to the subjects is that they need to locate the sounds as fast and accurate as possible by looking at where the sound was and then pressing a trigger. To test this tool, 16 subjects were used. It showed that the average time that it took the subjects to locate the sound was 3.7±1.8 seconds. The average error in accuracy was 15.4 degrees. The average time that it took the subjects to start moving their head was 0.2 seconds approximately. The average rotation speed achieved its maximum value at 0.8 seconds and the average speed at this point was approximately 102 degrees per second.",
author = "Calle, {Juan Simon} and Agnieszka Roginska",
note = "Funding Information: This research was conducted using funding from the Steinhardt School of Culture, Education, and Human Development Challenge Grant. Publisher Copyright: {\textcopyright} 143rd Audio Engineering Society Convention 2017, AES 2017. All rights reserved.; 143rd Audio Engineering Society Convention 2017, AES 2017 ; Conference date: 18-10-2017 Through 20-10-2017",
year = "2017",
language = "English (US)",
series = "143rd Audio Engineering Society Convention 2017, AES 2017",
publisher = "Audio Engineering Society",
pages = "718--726",
booktitle = "143rd Audio Engineering Society Convention 2017, AES 2017",
address = "United States",
}