@inproceedings{Mayer:2017:EFO,
  abstract  = {In recent years, touchscreens have become the most common input device for a wide range of computers. While touchscreens are truly pervasive, commercial devices reduce the richness of touch input to two-dimensional positions on the screen. Recent work proposed interaction techniques that extend the richness of the input vocabulary using the finger orientation. Approaches for determining a finger's orientation using off-the-shelf capacitive touchscreens proposed in previous work already enable compelling use cases. However, the low estimation accuracy limits the usability and restricts the use of finger orientation to non-precise input. With this paper, we provide a ground-truth data set for capacitive touchscreens recorded with a high-precision motion capture system. Using this data set, we show that a Convolutional Neural Network can outperform approaches proposed in previous work. Instead of relying on hand-crafted features, we trained the model on the raw capacitive images. Thereby, we reduce the pitch error by 9.8% and the yaw error by 45.7%.},
  address   = {Brighton, United Kingdom},
  author    = {Sven Mayer and Huy Viet Le and Niels Henze},
  booktitle = {Proceedings of the 2017 ACM International Conference on Interactive Surfaces and Spaces},
  date      = {2017-10-18},
  doi       = {10.1145/3132272.3134130},
  isbn      = {978-1-4503-4691-7},
  keywords  = {capacitive sensing, finger orientation, mobile device, touchscreen},
  pages     = {220--229},
  publisher = {ACM},
  pubstate  = {published},
  series    = {ISS '17},
  title     = {Estimating the Finger Orientation on Capacitive Touchscreens Using Convolutional Neural Networks},
  tppubtype = {inproceedings},
  url       = {http://sven-mayer.com/wp-content/uploads/2017/08/mayer2017orientation.pdf https://github.com/interactionlab/Capacitive-Finger-Orientation-Estimation https://www.youtube.com/watch?v=BLdynD9A23s},
  year      = {2017}
}