@inproceedings{7c6703e87eea4be0b57af19ea5393067,
title = "Exploring the design space of an augmented display for conveying facial expressions for people with autism",
abstract = "Facial expression is considered as the most intuitive and effective way of conveying one's emotion among other nonverbal interactions.However, people with autism have limited access to this rich communication channel due to their inability to read facial expressions. To help them be aware of others' emotions, we developed a CNN-based facial expression recognition system using Microsoft Hololens and explored three different modes for displaying facial expressions of a conversation partner varying the levels of explicitness. Subjective feedback from a preliminary study with 6 pilot participants suggests that each mode is worth investigating for serving people with various needs and preferences who wish to receive augmented visual hints on others' emotion.",
keywords = "Autism, Emotion-recognition, Facial-expressions, Mixed-reality",
author = "Seunga Chung and Uran Oh",
note = "Publisher Copyright: {\textcopyright} 2019 IEEE.; null ; Conference date: 14-10-2019 Through 18-10-2019",
year = "2019",
month = oct,
doi = "10.1109/ISMAR-Adjunct.2019.00049",
language = "English",
series = "Adjunct Proceedings of the 2019 IEEE International Symposium on Mixed and Augmented Reality, ISMAR-Adjunct 2019",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "435--437",
booktitle = "Adjunct Proceedings of the 2019 IEEE International Symposium on Mixed and Augmented Reality, ISMAR-Adjunct 2019",
}