@inproceedings{Mayer:2019:SweetSpot,
  abstract  = {With smartphones being a prime example, touchscreens became one of the most widely used interface to interact with computing systems. Compared to other touchscreen devices, smartphones pose additional challenges as the hand that interacts with the device is commonly used to also hold the device. Consequently, determining how fingers of the hand holding the device can interact with the screen is a non-trivial challenge. A body of recent work investigated the comfortable area in controlled lab studies. This poses limitations as it is based on the assumption that the grips used in the studies are representative for normal smartphone use. In this paper, we extend previous work by providing insights from in-the-wild studies using two different apps that were deployed in the Android App Store. Comparing our results with previous work we confirm that our data fits previously proposed models. Further analyzing the data, we highlight the sweet spot, the position that is touched if the input can be performed on the whole screen.},
  author    = {Mayer, Sven and Le, Huy Viet and Funk, Markus and Henze, Niels},
  booktitle = {Proceedings of the 2019 {ACM} International Conference on Interactive Surfaces and Spaces},
  date      = {2019-11-10},
  doi       = {10.1145/3343055.3359705},
  keywords  = {ergonomics, mobile device},
  pubstate  = {published},
  series    = {ISS'19},
  title     = {Finding the Sweet Spot: Analyzing Unrestricted Touchscreen Interaction In-the-Wild},
  tppubtype = {inproceedings},
  url       = {http://sven-mayer.com/wp-content/uploads/2019/09/mayer2019sweetspot.pdf https://www.youtube.com/watch?v=MirqESUmmp4},
  year      = {2019},
}