@article{Schneegass:2016:MIA:2984931.2984937b,
  author    = {Schneegass, Stefan and Olsson, Thomas and Mayer, Sven and van Laerhoven, Kristof},
  title     = {Mobile Interactions Augmented by Wearable Computing: A Design Space and Vision},
  journal   = {International Journal of Mobile Human Computer Interaction},
  year      = {2016},
  volume    = {8},
  number    = {4},
  pages     = {104--114},
  publisher = {IGI Global},
  address   = {Hershey, PA, USA},
  issn      = {1942-390X},
  doi       = {10.4018/IJMHCI.2016100106},
  url       = {http://sven-mayer.com/wp-content/uploads/2017/03/schneegass2016designspace.pdf},
  keywords  = {design space, mobile device, mobile interaction, wearable computing},
  abstract  = {Wearable computing has a huge potential to shape the way we interact with mobile devices in the future. Interaction with mobile devices is still mainly limited to visual output and tactile finger-based input. Despite the visions of next-generation mobile interaction, the hand-held form factor hinders new interaction techniques becoming commonplace. In contrast, wearable devices and sensors are intended for more continuous and close-to-body use. This makes it possible to design novel wearable-augmented mobile interaction methods - both explicit and implicit. For example, the EEG signal from a wearable breast strap could be used to identify user status and change the device state accordingly (implicit) and the optical tracking with a head-mounted camera could be used to recognize gestural input (explicit). In this paper, we outline the design space for how the existing and envisioned wearable devices and sensors could augment mobile interaction techniques. Based on designs and discussions in a recently organized workshop on the topic as well as other related work, we present an overview of this design space and highlight some use cases that underline the potential therein.},
  pubstate  = {published},
  tppubtype = {article},
}