@inproceedings{Mayer:2017:FAD,
  abstract  = {Over the last decade, a body of research has investigated enriching touch actions by using finger orientation as an additional input. Beyond new interaction techniques, we envision new user interface elements that make use of this additional input information. We define the finger's orientation by its pitch, roll, and yaw on the touch surface. Determining the finger orientation is not possible with current state-of-the-art devices. As a first step, we built a system that can determine the finger orientation. We developed a working prototype with a depth camera mounted on a tablet. We conducted a study with 12 participants to record ground-truth data for the index, middle, ring, and little finger to evaluate the accuracy of our prototype, using the PointPose algorithm to estimate the pitch and yaw of the finger. By applying 2D linear correction models, we further show a reduction of the RMSE by 45.4% for pitch and 21.83% for yaw.},
  address   = {New York, NY, USA},
  author    = {Sven Mayer and Michael Mayer and Niels Henze},
  booktitle = {Proceedings of the 19th International Conference on Human-Computer Interaction with Mobile Devices and Services Adjunct},
  date      = {2017-09-04},
  doi       = {10.1145/3098279.3122125},
  isbn      = {978-1-4503-5075-4},
  keywords  = {depth camera, finger orientation, mobile device, mobile interaction, modeling, smartphone},
  pages     = {82:1--82:8},
  publisher = {ACM},
  pubstate  = {published},
  series    = {MobileHCI'17},
  title     = {Feasibility Analysis of Detecting the Finger Orientation with Depth Cameras},
  tppubtype = {inproceedings},
  url       = {http://sven-mayer.com/wp-content/uploads/2017/07/mayer2017depth.pdf},
  year      = {2017}
}