@inproceedings{Mayer:2020:IHA,
  abstract  = {Collaborative Virtual Environments (CVEs) offer unique opportunities for human communication. Humans can interact with each other over a distance in any environment and visual embodiment they want. Although deictic gestures are especially important as they can guide other humans' attention, humans make systematic errors when using and interpreting them. Recent work suggests that the interpretation of vertical deictic gestures can be significantly improved by warping the pointing arm. In this paper, we extend previous work by showing that such models can also improve the interpretation of deictic gestures at targets all around the user. Through a study with 28 participants in a CVE, we analyzed the errors users make when interpreting deictic gestures. We derived a model that rotates the arm of a pointing user's avatar to improve the observing users' accuracy. A second study with 24 participants shows that we can improve observers' accuracy by 22.9%. As our approach is not noticeable to users, it improves their accuracy without requiring them to learn a new interaction technique or distracting from the experience.},
  address   = {New York, NY, USA},
  author    = {Sven Mayer and Jens Reinhardt and Robin Schweigert and Brighten Jelke and Valentin Schwind and Katrin Wolf and Niels Henze},
  booktitle = {Proceedings of the 2020 CHI Conference on Human Factors in Computing Systems},
  date      = {2020-04-25},
  doi       = {10.1145/3313831.3376340},
  keywords  = {mid-air pointing, virtual environment},
  publisher = {ACM},
  pubstate  = {published},
  series    = {CHI '20},
  title     = {Improving Humans' Ability to Interpret Deictic Gestures in Virtual Reality},
  tppubtype = {inproceedings},
  url       = {http://sven-mayer.com/wp-content/uploads/2020/01/mayer2020deictic.pdf https://www.youtube.com/watch?v=Afi4TPzHdlM https://github.com/interactionlab/Deictic-Pointing-in-VR},
  year      = {2020}
}