@InProceedings{rivu2019cogain,
  author    = {Sheikh Radiah Rahim Rivu and Yasmeen Abdrabou and Thomas Mayer and Ken Pfeuffer and Florian Alt},
  title     = {{GazeButton: Enhancing Buttons with Eye Gaze Interactions}},
  booktitle = {{Proceedings of the 2019 ACM Symposium on Eye Tracking Research \& Applications}},
  series    = {COGAIN '19},
  year      = {2019},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  location  = {Denver, Colorado},
  articleno = {73},
  numpages  = {7},
  doi       = {10.1145/3317956.3318154},
  isbn      = {9781450367097},
  keywords  = {touch and gaze, text input, interaction modality},
  url       = {http://www.florian-alt.org/unibw/wp-content/publications/rivu2019cogain.pdf},
  note      = {rivu2019cogain},
  timestamp = {2019.06.25},
  abstract  = {The button is an element of a user interface to trigger an action, traditionally using click or touch. We introduce GazeButton, a novel concept extending the default button mode with advanced gaze-based interactions. During normal interaction, users can utilise this button as a universal hub for gaze-based UI shortcuts. The advantages are: 1) easy to integrate in existing UIs, 2) complementary, as users choose either gaze or manual interaction, 3) straightforward, as all features are located in one button, and 4) one button to interact with the whole screen. We explore GazeButtons for a custom-made text reading, writing, and editing tool on a multitouch tablet device. For example, this allows the text cursor position to be set as users look at the position and tap on the GazeButton, avoiding costly physical movement. Or, users can simply gaze over a part of the text that should be selected, while holding the GazeButton. We present a design space, specific application examples, and point to future button designs that become highly expressive by unifying the user's visual and manual input.},
}