@article{chiossi2024searching,
  title    = {Searching Across Realities: Investigating ERPs and Eye-Tracking Correlates of Visual Search in Mixed Reality},
  author   = {Francesco Chiossi and Ines Trautmannsheimer and Changkun Ou and Uwe Gruenefeld and Sven Mayer},
  year     = {2024},
  journal  = {IEEE Transactions on Visualization and Computer Graphics},
  doi      = {10.1109/TVCG.2024.3456172},
  url      = {https://sven-mayer.com/wp-content/uploads/2024/08/chiossi2024searching.pdf},
  date     = {2024-10-21},
  abstract = {Mixed Reality allows us to integrate virtual and physical content into users' environments seamlessly. Yet, how this fusion affects perceptual and cognitive resources and our ability to find virtual or physical objects remains uncertain. Displaying virtual and physical information simultaneously might lead to divided attention and increased visual complexity, impacting users' visual processing, performance, and workload. In a visual search task, we asked participants to locate virtual and physical objects in Augmented Reality and Augmented Virtuality to understand the effects on performance. We evaluated search efficiency and attention allocation for virtual and physical objects using event-related potentials, fixation and saccade metrics, and behavioral measures. We found that users were more efficient in identifying objects in Augmented Virtuality, while virtual objects gained saliency in Augmented Virtuality. This suggests that visual fidelity might increase the perceptual load of the scene. Reduced amplitude in distractor positivity ERP and fixation patterns supported improved distractor suppression and search efficiency in Augmented Virtuality. We discuss design implications for mixed reality adaptive systems based on physiological inputs for interaction.},
  keywords = {mixed reality, visual search, ERP, eye-tracking, attention allocation, augmented reality, augmented virtuality, physiological data}
}