@InProceedings{khamis2018dronet,
  author    = {Mohamed Khamis and Anna Kienle and Florian Alt and Andreas Bulling},
  title     = {{GazeDrone: Mobile Eye-Based Interaction in Public Space Without Augmenting the User}},
  booktitle = {{4th ACM Workshop on Micro Aerial Vehicle Networks, Systems, and Applications}},
  series    = {DroNet'18},
  year      = {2018},
  month     = {June},
  pages     = {66--71},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  doi       = {10.1145/3213526.3213539},
  url       = {http://www.florian-alt.org/unibw/wp-content/publications/khamis2018dronet},
  keywords  = {Active eye tracking, drones, gaze interaction, UAV},
  timestamp = {2018.09.05},
  abstract  = {Gaze interaction holds a lot of promise for seamless human-computer interaction. At the same time, current wearable mobile eye trackers require user augmentation that negatively impacts natural user behavior while remote trackers require users to position themselves within a confined tracking range. We present GazeDrone, the first system that combines a camera-equipped aerial drone with a computational method to detect sidelong glances for spontaneous (calibration-free) gaze-based interaction with surrounding pervasive systems (e.g., public displays). GazeDrone does not require augmenting each user with on-body sensors and allows interaction from arbitrary positions, even while moving. We demonstrate that drone-supported gaze interaction is feasible and accurate for certain movement types. It is well-perceived by users, in particular while interacting from a fixed position as well as while moving orthogonally or diagonally to a display. We present design implications and discuss opportunities and challenges for drone-supported gaze interaction in public.},
}