@inproceedings{5b4a8f5e240c40728dbf3ab88dab9c9e,
  author    = {Xue, Tong and Ghosh, Surjya and Ding, Gangyi and {El Ali}, Abdallah and Cesar, Pablo},
  title     = {Designing Real-Time, Continuous Emotion Annotation Techniques for 360° {VR} Videos},
  abstract  = {With the increasing availability of head-mounted displays (HMDs) that show immersive 360° VR content, it is important to understand to what extent these immersive experiences can evoke emotions. Typically to collect emotion ground truth labels, users rate videos through post-experience self-reports that are discrete in nature. However, post-stimuli self-reports are temporally imprecise, especially after watching 360° videos. In this work, we design six continuous emotion annotation techniques for the Oculus Rift HMD aimed at minimizing workload and distraction. Based on a co-design session with six experts, we contribute HaloLight and DotSize, two continuous annotation methods deemed unobtrusive and easy to understand. We discuss the next challenges for evaluating the usability of these techniques, and reliability of continuous annotations.},
  keywords  = {360 video, Continuous, Emotion annotation, Visualization},
  booktitle = {{CHI} {EA} 2020 - Extended Abstracts of the 2020 {CHI} Conference on Human Factors in Computing Systems},
  series    = {Conference on Human Factors in Computing Systems - Proceedings},
  publisher = {Association for Computing Machinery (ACM)},
  address   = {United States},
  month     = apr,
  year      = {2020},
  doi       = {10.1145/3334480.3382895},
  note      = {Virtual/online event due to COVID-19 ; 2020 ACM CHI Conference on Human Factors in Computing Systems, CHI EA 2020 ; Conference date: 25-04-2020 Through 30-04-2020},
  language  = {English},
}