@inproceedings{10.1145/3526113.3545633,
  author    = {Sutton, Jonathan and Langlotz, Tobias and Plopski, Alexander and Zollmann, Stefanie and Itoh, Yuta and Regenbrecht, Holger},
  title     = {Look over There! Investigating Saliency Modulation for Visual Guidance with Augmented Reality Glasses},
  year      = {2022},
  isbn      = {9781450393201},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  url       = {https://doi.org/10.1145/3526113.3545633},
  doi       = {10.1145/3526113.3545633},
  abstract  = {Augmented Reality has traditionally been used to display digital overlays in real environments. Many AR applications such as remote collaboration, picking tasks, or navigation require highlighting physical objects for selection or guidance. These highlights use graphical cues such as outlines and arrows. Whilst effective, they greatly contribute to visual clutter, possibly occlude scene elements, and can be problematic for long-term use. Substituting those overlays, we explore saliency modulation to accentuate objects in the real environment to guide the user’s gaze. Instead of manipulating video streams, like done in perception and cognition research, we investigate saliency modulation of the real world using optical-see-through head-mounted displays. This is a new challenge, since we do not have full control over the view of the real environment. In this work we provide our specific solution to this challenge, including built prototypes and their evaluation.},
  booktitle = {Proceedings of the 35th Annual ACM Symposium on User Interface Software and Technology},
  articleno = {81},
  numpages  = {15},
  keywords  = {Mixed Reality, Eye tracking, Augmented Human, Computational Glasses, Vision Augmentation, Saliency Modulation, Visual Guidance, Augmented Reality, Saliency, Gaze},
  location  = {Bend, OR, USA},
  series    = {UIST '22}
}