@inproceedings{b7a1414e415446d2b7294e8d3eed80f6,
  author    = {Yao, Nancy and Brewer, Jeff and D{\textquoteright}Angelo, Sarah and Horn, Michael and Gergle, Darren},
  title     = {Visualizing gaze information from multiple students to support remote instruction},
  abstract  = {Technologically-mediated learning environments are becoming increasingly popular, however remote learning still lacks many of the important interpersonal features that are leveraged in effective co-located learning. Recent work has started to build in non-verbal cues to support remote collaboration, such as showing pairs where their partner is looking on the screen. This method of displaying gaze visualizations has been shown to support coordination and learning in remote collaborative tasks. However, we have yet to explore how this technique scales to support multiple students with one teacher in a technology-mediated learning environment. In this study, we design and evaluate a system for displaying real time gaze information from multiple students to a single teacher{\textquoteright}s display during a computer science studio session. Our results suggest that multiple gaze visualizations can improve the teaching experience in remote settings. Further, we provide design recommendations for future systems based on our preliminary results.},
  keywords  = {Collaboration, Eye-tracking, Gaze visualizations, Learning},
  booktitle = {CHI 2018 - Extended Abstracts of the 2018 CHI Conference on Human Factors in Computing Systems},
  series    = {Conference on Human Factors in Computing Systems - Proceedings},
  publisher = {Association for Computing Machinery},
  year      = {2018},
  month     = apr,
  day       = {20},
  doi       = {10.1145/3170427.3188453},
  language  = {English (US)},
  note      = {Publisher Copyright: Copyright held by the owner/author(s).; 2018 CHI Conference on Human Factors in Computing Systems, CHI EA 2018 ; Conference date: 21-04-2018 Through 26-04-2018},
}