@inproceedings{penzkofer2021conan,
  title     = {ConAn: A Usable Tool for Multimodal Conversation Analysis},
  author    = {Anna Penzkofer and Philipp M\"{u}ller and Felix B\"{u}hler and Sven Mayer and Andreas Bulling},
  year      = {2021},
  booktitle = {Proceedings of the 2021 International Conference on Multimodal Interaction},
  series    = {ICMI '21},
  doi       = {10.1145/3462244.3479886},
  url       = {https://sven-mayer.com/wp-content/uploads/2022/02/penzkofer2021conan.pdf},
  date      = {2021-10-18},
  abstract  = {Multimodal analysis of group behavior is a key task in human-computer interaction, and in the social and behavioral sciences, but is often limited to more easily controllable laboratory settings or requires elaborate multi-sensor setups and time-consuming manual data annotation. We present ConAn -- a usable tool to explore and automatically analyze non-verbal behavior of multiple persons during natural group conversations. In contrast to traditional multi-sensor setups, our tool only requires a single 360° camera and uses state-of-the-art computer vision methods to automatically extract behavioral indicators, such as gaze direction, facial expressions, and speaking activity. As such, our tool allows for easy and fast deployment and supports researchers in understanding individual behavior, group interaction dynamics, and in quantifying user-object interactions. We illustrate the benefits of ConAn on three sample use cases: conversation analysis, assessment of collaboration quality, and impact of technology on audience behavior. Taken together, ConAn represents an important step towards democratizing automatic conversation analysis in HCI and beyond.},
  keywords  = {conversation analysis, multimodal}
}